| column | type | min length | max length |
|---|---|---|---|
| sha | string | 40 | 40 |
| text | string | 1 | 13.4M |
| id | string | 2 | 117 |
| tags | sequence | 1 | 7.91k |
| created_at | string | 25 | 25 |
| metadata | string | 2 | 875k |
| last_modified | string | 25 | 25 |
| arxiv | sequence | 0 | 25 |
| languages | sequence | 0 | 7.91k |
| tags_str | string | 17 | 159k |
| text_str | string | 1 | 447k |
| text_lists | sequence | 0 | 352 |
| processed_texts | sequence | 1 | 353 |
| tokens_length | sequence | 1 | 353 |
| input_texts | sequence | 1 | 40 |
2ea5c988981fae19ef01a8c51e54fc9a97eeae59
# Dataset Card for "araproje_arc_en_f3" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibranze/araproje_arc_en_f3
[ "region:us" ]
2024-01-04T02:41:10+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "choices", "sequence": [{"name": "text", "dtype": "string"}, {"name": "label", "dtype": "string"}]}, {"name": "answerKey", "dtype": "string"}], "splits": [{"name": "validation", "num_bytes": 80031.0, "num_examples": 250}], "download_size": 0, "dataset_size": 80031.0}, "configs": [{"config_name": "default", "data_files": [{"split": "validation", "path": "data/validation-*"}]}]}
2024-01-04T02:58:19+00:00
[]
[]
TAGS #region-us
# Dataset Card for "araproje_arc_en_f3" More Information needed
[ "# Dataset Card for \"araproje_arc_en_f3\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"araproje_arc_en_f3\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"araproje_arc_en_f3\"\n\nMore Information needed" ]
a99fbba26b1ba7cdcce743b901c03a7e53fc257d
# Dataset Card for "araproje_arc_en_f4" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibranze/araproje_arc_en_f4
[ "region:us" ]
2024-01-04T02:41:15+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "choices", "sequence": [{"name": "text", "dtype": "string"}, {"name": "label", "dtype": "string"}]}, {"name": "answerKey", "dtype": "string"}], "splits": [{"name": "validation", "num_bytes": 80031.0, "num_examples": 250}], "download_size": 0, "dataset_size": 80031.0}, "configs": [{"config_name": "default", "data_files": [{"split": "validation", "path": "data/validation-*"}]}]}
2024-01-04T02:58:20+00:00
[]
[]
TAGS #region-us
# Dataset Card for "araproje_arc_en_f4" More Information needed
[ "# Dataset Card for \"araproje_arc_en_f4\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"araproje_arc_en_f4\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"araproje_arc_en_f4\"\n\nMore Information needed" ]
048ad1f8ac24492181e8e071afde88d29409a937
# Dataset Card for "araproje_arc_en_f5" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibranze/araproje_arc_en_f5
[ "region:us" ]
2024-01-04T02:41:20+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "choices", "sequence": [{"name": "text", "dtype": "string"}, {"name": "label", "dtype": "string"}]}, {"name": "answerKey", "dtype": "string"}], "splits": [{"name": "validation", "num_bytes": 80031.0, "num_examples": 250}], "download_size": 0, "dataset_size": 80031.0}, "configs": [{"config_name": "default", "data_files": [{"split": "validation", "path": "data/validation-*"}]}]}
2024-01-04T02:58:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for "araproje_arc_en_f5" More Information needed
[ "# Dataset Card for \"araproje_arc_en_f5\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"araproje_arc_en_f5\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"araproje_arc_en_f5\"\n\nMore Information needed" ]
c831717ccfce922564944c61f716722e924b0f63
# Dataset Card for "araproje_arc_en_s1" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibranze/araproje_arc_en_s1
[ "region:us" ]
2024-01-04T02:57:15+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "choices", "sequence": [{"name": "text", "dtype": "string"}, {"name": "label", "dtype": "string"}]}, {"name": "answerKey", "dtype": "string"}], "splits": [{"name": "validation", "num_bytes": 80031.0, "num_examples": 250}], "download_size": 47072, "dataset_size": 80031.0}, "configs": [{"config_name": "default", "data_files": [{"split": "validation", "path": "data/validation-*"}]}]}
2024-01-04T02:58:16+00:00
[]
[]
TAGS #region-us
# Dataset Card for "araproje_arc_en_s1" More Information needed
[ "# Dataset Card for \"araproje_arc_en_s1\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"araproje_arc_en_s1\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"araproje_arc_en_s1\"\n\nMore Information needed" ]
d118fa027b096767b74ee965f457d52ade30c1ba
# Dataset Card for "araproje_arc_en_s2" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibranze/araproje_arc_en_s2
[ "region:us" ]
2024-01-04T02:57:19+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "choices", "sequence": [{"name": "text", "dtype": "string"}, {"name": "label", "dtype": "string"}]}, {"name": "answerKey", "dtype": "string"}], "splits": [{"name": "validation", "num_bytes": 80031.0, "num_examples": 250}], "download_size": 46973, "dataset_size": 80031.0}, "configs": [{"config_name": "default", "data_files": [{"split": "validation", "path": "data/validation-*"}]}]}
2024-01-04T02:58:25+00:00
[]
[]
TAGS #region-us
# Dataset Card for "araproje_arc_en_s2" More Information needed
[ "# Dataset Card for \"araproje_arc_en_s2\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"araproje_arc_en_s2\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"araproje_arc_en_s2\"\n\nMore Information needed" ]
8964ad7b119b5fd49a72ed4fd72b37d0003cee44
# Dataset Card for "araproje_arc_en_s3" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibranze/araproje_arc_en_s3
[ "region:us" ]
2024-01-04T02:57:23+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "choices", "sequence": [{"name": "text", "dtype": "string"}, {"name": "label", "dtype": "string"}]}, {"name": "answerKey", "dtype": "string"}], "splits": [{"name": "validation", "num_bytes": 80031.0, "num_examples": 250}], "download_size": 46971, "dataset_size": 80031.0}, "configs": [{"config_name": "default", "data_files": [{"split": "validation", "path": "data/validation-*"}]}]}
2024-01-04T02:58:28+00:00
[]
[]
TAGS #region-us
# Dataset Card for "araproje_arc_en_s3" More Information needed
[ "# Dataset Card for \"araproje_arc_en_s3\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"araproje_arc_en_s3\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"araproje_arc_en_s3\"\n\nMore Information needed" ]
bf88a5708c7414de48671ca3f7f44b568502da89
# Dataset Card for "araproje_arc_en_s4" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibranze/araproje_arc_en_s4
[ "region:us" ]
2024-01-04T02:57:27+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "choices", "sequence": [{"name": "text", "dtype": "string"}, {"name": "label", "dtype": "string"}]}, {"name": "answerKey", "dtype": "string"}], "splits": [{"name": "validation", "num_bytes": 80031.0, "num_examples": 250}], "download_size": 47366, "dataset_size": 80031.0}, "configs": [{"config_name": "default", "data_files": [{"split": "validation", "path": "data/validation-*"}]}]}
2024-01-04T02:58:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for "araproje_arc_en_s4" More Information needed
[ "# Dataset Card for \"araproje_arc_en_s4\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"araproje_arc_en_s4\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"araproje_arc_en_s4\"\n\nMore Information needed" ]
d2ae384fa23aeed060374e3c6e52f0b02227b7ae
# Dataset Card for "araproje_arc_en_s5" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ibranze/araproje_arc_en_s5
[ "region:us" ]
2024-01-04T02:57:33+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "choices", "sequence": [{"name": "text", "dtype": "string"}, {"name": "label", "dtype": "string"}]}, {"name": "answerKey", "dtype": "string"}], "splits": [{"name": "validation", "num_bytes": 80031.0, "num_examples": 250}], "download_size": 47124, "dataset_size": 80031.0}, "configs": [{"config_name": "default", "data_files": [{"split": "validation", "path": "data/validation-*"}]}]}
2024-01-04T02:58:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for "araproje_arc_en_s5" More Information needed
[ "# Dataset Card for \"araproje_arc_en_s5\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"araproje_arc_en_s5\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"araproje_arc_en_s5\"\n\nMore Information needed" ]
c77eb0a525ec6d18b19b7a9620f13de8b0941e49
# Dataset Card for "librispeech_synth" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Codec-SUPERB/librispeech_synth
[ "region:us" ]
2024-01-04T04:21:43+00:00
{"dataset_info": {"features": [{"name": "audio", "dtype": {"audio": {"sampling_rate": 16000}}}, {"name": "id", "dtype": "string"}], "splits": [{"name": "academicodec_hifi_16k_320d", "num_bytes": 113116345974.686, "num_examples": 292367}, {"name": "academicodec_hifi_16k_320d_large_uni", "num_bytes": 113116345974.686, "num_examples": 292367}, {"name": "academicodec_hifi_24k_320d", "num_bytes": 169685346294.686, "num_examples": 292367}, {"name": "funcodec_en_libritts_16k_gr1nq32ds320", "num_bytes": 113174576650.686, "num_examples": 292367}, {"name": "funcodec_en_libritts_16k_gr8nq32ds320", "num_bytes": 113173372218.686, "num_examples": 292367}, {"name": "audiodec_24k_320d", "num_bytes": 169835583482.686, "num_examples": 292367}, {"name": "original", "num_bytes": 63678669918.686, "num_examples": 292367}, {"name": "funcodec_en_libritts_16k_nq32ds320", "num_bytes": 113186105690.686, "num_examples": 292367}, {"name": "dac_16k", "num_bytes": 113185098868.686, "num_examples": 292367}, {"name": "funcodec_en_libritts_16k_nq32ds640", "num_bytes": 113186105690.686, "num_examples": 292367}, {"name": "funcodec_zh_en_16k_nq32ds320", "num_bytes": 113186105690.686, "num_examples": 292367}, {"name": "funcodec_zh_en_16k_nq32ds640", "num_bytes": 113186105690.686, "num_examples": 292367}, {"name": "dac_24k", "num_bytes": 169767074932.686, "num_examples": 292367}, {"name": "speech_tokenizer_16k", "num_bytes": 113255906934.686, "num_examples": 292367}], "download_size": 1424205343315, "dataset_size": 1704732744013.6042}, "configs": [{"config_name": "default", "data_files": [{"split": "academicodec_hifi_16k_320d", "path": "data/academicodec_hifi_16k_320d-*"}, {"split": "academicodec_hifi_16k_320d_large_uni", "path": "data/academicodec_hifi_16k_320d_large_uni-*"}, {"split": "academicodec_hifi_24k_320d", "path": "data/academicodec_hifi_24k_320d-*"}, {"split": "funcodec_en_libritts_16k_gr1nq32ds320", "path": "data/funcodec_en_libritts_16k_gr1nq32ds320-*"}, {"split": "funcodec_en_libritts_16k_gr8nq32ds320", "path": "data/funcodec_en_libritts_16k_gr8nq32ds320-*"}, {"split": "audiodec_24k_320d", "path": "data/audiodec_24k_320d-*"}, {"split": "original", "path": "data/original-*"}, {"split": "funcodec_en_libritts_16k_nq32ds320", "path": "data/funcodec_en_libritts_16k_nq32ds320-*"}, {"split": "dac_16k", "path": "data/dac_16k-*"}, {"split": "funcodec_en_libritts_16k_nq32ds640", "path": "data/funcodec_en_libritts_16k_nq32ds640-*"}, {"split": "funcodec_zh_en_16k_nq32ds320", "path": "data/funcodec_zh_en_16k_nq32ds320-*"}, {"split": "funcodec_zh_en_16k_nq32ds640", "path": "data/funcodec_zh_en_16k_nq32ds640-*"}, {"split": "dac_24k", "path": "data/dac_24k-*"}, {"split": "speech_tokenizer_16k", "path": "data/speech_tokenizer_16k-*"}]}]}
2024-01-15T14:57:31+00:00
[]
[]
TAGS #region-us
# Dataset Card for "librispeech_synth" More Information needed
[ "# Dataset Card for \"librispeech_synth\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"librispeech_synth\"\n\nMore Information needed" ]
[ 6, 16 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"librispeech_synth\"\n\nMore Information needed" ]
8bdb9a9bd140c007b8d1aa81c2fe349df70a5c95
# Dataset Card for "llama2_7b-arc_hard" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
automated-research-group/llama2_7b-arc_hard
[ "region:us" ]
2024-01-04T04:35:59+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "request", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "input_perplexity", "dtype": "float64"}, {"name": "input_likelihood", "dtype": "float64"}, {"name": "output_perplexity", "dtype": "float64"}, {"name": "output_likelihood", "dtype": "float64"}], "splits": [{"name": "validation", "num_bytes": 133089, "num_examples": 299}], "download_size": 70389, "dataset_size": 133089}, "configs": [{"config_name": "default", "data_files": [{"split": "validation", "path": "data/validation-*"}]}]}
2024-01-04T04:36:01+00:00
[]
[]
TAGS #region-us
# Dataset Card for "llama2_7b-arc_hard" More Information needed
[ "# Dataset Card for \"llama2_7b-arc_hard\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"llama2_7b-arc_hard\"\n\nMore Information needed" ]
[ 6, 21 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"llama2_7b-arc_hard\"\n\nMore Information needed" ]
04bdf8d7931f899120a1e8b78779f944f4210154
# Dataset Card for "llama2_7b-arc_hard-results_playing" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
automated-research-group/llama2_7b-arc_hard-results_playing
[ "region:us" ]
2024-01-04T04:37:35+00:00
{"dataset_info": {"config_name": "{'do_sample'=False, 'beams'=1}", "features": [{"name": "id", "dtype": "string"}, {"name": "prediction", "dtype": "string"}, {"name": "bool_accuracy", "dtype": "bool"}], "splits": [{"name": "train", "num_bytes": 11410, "num_examples": 299}], "download_size": 9803, "dataset_size": 11410}, "configs": [{"config_name": "{'do_sample'=False, 'beams'=1}", "data_files": [{"split": "train", "path": "{'do_sample'=False, 'beams'=1}/train-*"}]}]}
2024-01-04T04:37:36+00:00
[]
[]
TAGS #region-us
# Dataset Card for "llama2_7b-arc_hard-results_playing" More Information needed
[ "# Dataset Card for \"llama2_7b-arc_hard-results_playing\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"llama2_7b-arc_hard-results_playing\"\n\nMore Information needed" ]
[ 6, 28 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"llama2_7b-arc_hard-results_playing\"\n\nMore Information needed" ]
408eba388a08d3648c609c063ae6a8864f4fdc1a
# RLHF Reward Model Embedding Features for PKU-Alignment/PKU-SafeRLHF Dataset

The RLHF reward model embedding features and the corresponding original text are stored in `embeddings_train.jsonl` and `embeddings_test.jsonl`. The dataset is stored in a pairwise format: each data pair has 1) safer_example: input text of the safer example, 2) not_safer_example: input text of the more harmful example, 3) safer_embedding: embedding feature of the safer example, 4) not_safer_embedding: embedding feature of the more harmful example. The hidden embedding dimension is 4096. The reward model uses a linear layer to map the embedding features to a 1-dimensional score value. Note: The dataset is extremely large because of the large size of the original training dataset and the high dimension of the embedding space.

# Original Dataset

If you need more detailed information about the original dataset, please refer to `train.jsonl.xz` and `test.jsonl.xz`. Since we used `shuffle=False` when generating the embeddings, the original order is preserved in our dataset.

# Note

This dataset is a processed version of PKU-Alignment/PKU-SafeRLHF: <https://huggingface.co/datasets/PKU-Alignment/PKU-SafeRLHF>.
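A minimal sketch (not from the original card) of reading one pair from `embeddings_train.jsonl` and scoring both sides with a linear head, following the structure described above; the field names come from the card, while the linear weights here are random placeholders rather than the actual reward model parameters:

```python
import json

import torch

# Read one pairwise record; field names follow the dataset card.
with open("embeddings_train.jsonl") as f:
    pair = json.loads(f.readline())

safer = torch.tensor(pair["safer_embedding"])        # shape: (4096,)
harmful = torch.tensor(pair["not_safer_embedding"])  # shape: (4096,)

# The card states the reward model maps the 4096-d embedding to a scalar
# score with a linear layer; this head is randomly initialized for illustration.
score_head = torch.nn.Linear(4096, 1)
print(float(score_head(safer)), float(score_head(harmful)))
```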
Jayfeather1024/Reward-Embeddings
[ "license:unknown", "region:us" ]
2024-01-04T04:49:22+00:00
{"license": "unknown"}
2024-01-04T05:32:38+00:00
[]
[]
TAGS #license-unknown #region-us
# RLHF Reward Model Embedding Features for PKU-Alignment/PKU-SafeRLHF Dataset The RLHF reward model embedding features and corresponding original text are stored in 'embeddings_train.jsonl' and 'embeddings_test.jsonl'. The dataset is stored in pairwise ways: each data pair has 1) safer_example: input text of the safer example, 2) not_safer_example: input text of the more harmful example, 3) safer_embedding: embedding feature of the safer example, 4) not_safer_embedding: embedding feature of the more harmful example. The hidden embedding dimension is 4096. The reward model uses a linear layer to transfer the embedding features into a 1-dimensional score value. Note: The dataset is extremely large because of the large size of the original training dataset and the high dimension of embedding space. # Original Dataset If you need more detailed information about the original dataset, please refer to 'URL' and 'URL'. Since we use 'shuffle=False' when generating the embeddings, orders are remained in our dataset. # Note This dataset is a processed version of PKU-Alignment/PKU-SafeRLHF: <URL
[ "# RLHF Reward Model Embedding Features for PKU-Alignment/PKU-SafeRLHF Dataset\n\nThe RLHF reward model embedding features and corresponding original text are stored in 'embeddings_train.jsonl' and 'embeddings_test.jsonl'.\n\nThe dataset is stored in pairwise ways: each data pair has 1) safer_example: input text of the safer example, 2) not_safer_example: input text of the more harmful example, 3) safer_embedding: embedding feature of the safer example, 4) not_safer_embedding: embedding feature of the more harmful example.\n\nThe hidden embedding dimension is 4096. The reward model uses a linear layer to transfer the embedding features into a 1-dimensional score value.\n\nNote: The dataset is extremely large because of the large size of the original training dataset and the high dimension of embedding space.", "# Original Dataset\n\nIf you need more detailed information about the original dataset, please refer to 'URL' and 'URL'. Since we use 'shuffle=False' when generating the embeddings, orders are remained in our dataset.", "# Note\n\nThis dataset is a processed version of PKU-Alignment/PKU-SafeRLHF: <URL" ]
[ "TAGS\n#license-unknown #region-us \n", "# RLHF Reward Model Embedding Features for PKU-Alignment/PKU-SafeRLHF Dataset\n\nThe RLHF reward model embedding features and corresponding original text are stored in 'embeddings_train.jsonl' and 'embeddings_test.jsonl'.\n\nThe dataset is stored in pairwise ways: each data pair has 1) safer_example: input text of the safer example, 2) not_safer_example: input text of the more harmful example, 3) safer_embedding: embedding feature of the safer example, 4) not_safer_embedding: embedding feature of the more harmful example.\n\nThe hidden embedding dimension is 4096. The reward model uses a linear layer to transfer the embedding features into a 1-dimensional score value.\n\nNote: The dataset is extremely large because of the large size of the original training dataset and the high dimension of embedding space.", "# Original Dataset\n\nIf you need more detailed information about the original dataset, please refer to 'URL' and 'URL'. Since we use 'shuffle=False' when generating the embeddings, orders are remained in our dataset.", "# Note\n\nThis dataset is a processed version of PKU-Alignment/PKU-SafeRLHF: <URL" ]
[ 13, 226, 57, 29 ]
[ "passage: TAGS\n#license-unknown #region-us \n# RLHF Reward Model Embedding Features for PKU-Alignment/PKU-SafeRLHF Dataset\n\nThe RLHF reward model embedding features and corresponding original text are stored in 'embeddings_train.jsonl' and 'embeddings_test.jsonl'.\n\nThe dataset is stored in pairwise ways: each data pair has 1) safer_example: input text of the safer example, 2) not_safer_example: input text of the more harmful example, 3) safer_embedding: embedding feature of the safer example, 4) not_safer_embedding: embedding feature of the more harmful example.\n\nThe hidden embedding dimension is 4096. The reward model uses a linear layer to transfer the embedding features into a 1-dimensional score value.\n\nNote: The dataset is extremely large because of the large size of the original training dataset and the high dimension of embedding space.# Original Dataset\n\nIf you need more detailed information about the original dataset, please refer to 'URL' and 'URL'. Since we use 'shuffle=False' when generating the embeddings, orders are remained in our dataset.# Note\n\nThis dataset is a processed version of PKU-Alignment/PKU-SafeRLHF: <URL" ]
eb1e5b485a5d771c66c64dd0990e15496581aaf6
# Dataset Card for "math_instruct_binarized" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
jan-hq/math_instruct_binarized
[ "region:us" ]
2024-01-04T05:17:42+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}], "dataset_info": {"features": [{"name": "messages", "list": [{"name": "content", "dtype": "string"}, {"name": "role", "dtype": "string"}]}], "splits": [{"name": "train", "num_bytes": 169356177.9, "num_examples": 235836}, {"name": "test", "num_bytes": 18817353.1, "num_examples": 26204}], "download_size": 95177988, "dataset_size": 188173531.0}}
2024-01-04T05:17:59+00:00
[]
[]
TAGS #region-us
# Dataset Card for "math_instruct_binarized" More Information needed
[ "# Dataset Card for \"math_instruct_binarized\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"math_instruct_binarized\"\n\nMore Information needed" ]
[ 6, 18 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"math_instruct_binarized\"\n\nMore Information needed" ]
0b8b93812a89ed4cbba13ca86b092c973f341321
# Dataset Card for Dataset Name <!-- Provide a quick summary of the dataset. --> This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1). ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. 
--> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
EiffL/DESI2
[ "size_categories:1M<n<10M", "license:mit", "region:us" ]
2024-01-04T05:45:22+00:00
{"license": "mit", "size_categories": ["1M<n<10M"], "configs": [{"config_name": "EDR_SV3", "version": "0.0.1", "default": true, "data_files": [{"split": "train", "path": "data/EDR_SV3/*.parquet"}]}], "dataset_info": {"config_name": "EDR_SV3", "features": [{"name": "TARGETID", "dtype": "int64"}, {"name": "SURVEY", "dtype": "string"}, {"name": "PROGRAM", "dtype": "string"}, {"name": "HEALPIX", "dtype": "int32"}, {"name": "TARGET_RA", "dtype": "float64"}, {"name": "TARGET_DEC", "dtype": "float64"}, {"name": "RELEASE", "dtype": "int16"}, {"name": "BRICKID", "dtype": "int32"}, {"name": "BRICK_OBJID", "dtype": "int32"}, {"name": "Z", "dtype": "float64"}, {"name": "EBV", "dtype": "float32"}, {"name": "FLUX_G", "dtype": "float32"}, {"name": "FLUX_R", "dtype": "float32"}, {"name": "FLUX_Z", "dtype": "float32"}, {"name": "FLUX_IVAR_G", "dtype": "float32"}, {"name": "FLUX_IVAR_R", "dtype": "float32"}, {"name": "FLUX_IVAR_Z", "dtype": "float32"}, {"name": "wave", "dtype": "float32"}, {"name": "flux", "sequence": "float32", "length": 7781}, {"name": "ivar", "sequence": "float32", "length": 7781}], "homepage": "https://www.desi.lbl.gov/", "description": "One Percent Survey from the Early Data Release of the DESI Spectroscopic Survey", "splits": [{"name": "train", "num_bytes": 72417839557, "num_examples": 1126441}], "download_size": 70656849405, "dataset_size": 72417839557}}
2024-01-04T06:40:32+00:00
[]
[]
TAGS #size_categories-1M<n<10M #license-mit #region-us
# Dataset Card for Dataset Name This dataset card aims to be a base template for new datasets. It has been generated using this raw template. ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#size_categories-1M<n<10M #license-mit #region-us \n", "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 23, 34, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#size_categories-1M<n<10M #license-mit #region-us \n# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
bf4610c3af70c7e11fc8a0f6e2faf3fc0ad2f2bd
# Dataset Card for "flan-10k-flat-reduced-templated" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
sordonia/flan-10k-reduced-templated-ia-flat
[ "region:us" ]
2024-01-04T06:05:49+00:00
{"dataset_info": {"features": [{"name": "source", "dtype": "string"}, {"name": "target", "dtype": "string"}, {"name": "task_name", "dtype": "string"}, {"name": "task_source", "dtype": "string"}, {"name": "template_type", "dtype": "string"}, {"name": "template_idx", "dtype": "int64"}, {"name": "split", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 7458503509, "num_examples": 2345464}], "download_size": 4074707295, "dataset_size": 7458503509}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-04T06:08:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for "flan-10k-flat-reduced-templated" More Information needed
[ "# Dataset Card for \"flan-10k-flat-reduced-templated\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"flan-10k-flat-reduced-templated\"\n\nMore Information needed" ]
[ 6, 23 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"flan-10k-flat-reduced-templated\"\n\nMore Information needed" ]
48012602b501af5110e1b64b557e1aadfc59e81a
# TTS-test-wavs For test.
AaronLi/TTS-test-wavs
[ "license:mit", "region:us" ]
2024-01-04T07:59:30+00:00
{"license": "mit"}
2024-01-04T08:07:25+00:00
[]
[]
TAGS #license-mit #region-us
# TTS-test-wavs For test.
[ "# TTS-test-wavs\n\nFor test." ]
[ "TAGS\n#license-mit #region-us \n", "# TTS-test-wavs\n\nFor test." ]
[ 11, 11 ]
[ "passage: TAGS\n#license-mit #region-us \n# TTS-test-wavs\n\nFor test." ]
f69b075721ed1f15d98749814be8cab8be65d5cb
tejasvaidhya/testing
[ "region:us" ]
2024-01-04T07:59:39+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "ocr_annotation_texts", "dtype": "string"}, {"name": "image_height", "dtype": "int64"}, {"name": "image_width", "dtype": "int64"}]}, "configs": [{"config_name": "default", "data_files": [{"split": "test", "path": "testing.parquet"}]}]}
2024-01-09T06:01:22+00:00
[]
[]
TAGS #region-us
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
efec09504a4b024941267d48c8830f91433f8f82
# Dataset Card for "dog_breed_classification" Dataset from Kaggle Competition Dog Breeds
ajinkyakolhe112/dog_breed_classification_kaggle
[ "region:us" ]
2024-01-04T08:13:37+00:00
{"pretty_name": "Stanford Dog Breed Classification from Imagenet", "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "affenpinscher", "1": "afghan_hound", "2": "african_hunting_dog", "3": "airedale", "4": "american_staffordshire_terrier", "5": "appenzeller", "6": "australian_terrier", "7": "basenji", "8": "basset", "9": "beagle", "10": "bedlington_terrier", "11": "bernese_mountain_dog", "12": "black-and-tan_coonhound", "13": "blenheim_spaniel", "14": "bloodhound", "15": "bluetick", "16": "border_collie", "17": "border_terrier", "18": "borzoi", "19": "boston_bull", "20": "bouvier_des_flandres", "21": "boxer", "22": "brabancon_griffon", "23": "briard", "24": "brittany_spaniel", "25": "bull_mastiff", "26": "cairn", "27": "cardigan", "28": "chesapeake_bay_retriever", "29": "chihuahua", "30": "chow", "31": "clumber", "32": "cocker_spaniel", "33": "collie", "34": "curly-coated_retriever", "35": "dandie_dinmont", "36": "dhole", "37": "dingo", "38": "doberman", "39": "english_foxhound", "40": "english_setter", "41": "english_springer", "42": "entlebucher", "43": "eskimo_dog", "44": "flat-coated_retriever", "45": "french_bulldog", "46": "german_shepherd", "47": "german_short-haired_pointer", "48": "giant_schnauzer", "49": "golden_retriever", "50": "gordon_setter", "51": "great_dane", "52": "great_pyrenees", "53": "greater_swiss_mountain_dog", "54": "groenendael", "55": "ibizan_hound", "56": "irish_setter", "57": "irish_terrier", "58": "irish_water_spaniel", "59": "irish_wolfhound", "60": "italian_greyhound", "61": "japanese_spaniel", "62": "keeshond", "63": "kelpie", "64": "kerry_blue_terrier", "65": "komondor", "66": "kuvasz", "67": "labrador_retriever", "68": "lakeland_terrier", "69": "leonberg", "70": "lhasa", "71": "malamute", "72": "malinois", "73": "maltese_dog", "74": "mexican_hairless", "75": "miniature_pinscher", "76": "miniature_poodle", "77": "miniature_schnauzer", "78": "newfoundland", "79": "norfolk_terrier", "80": "norwegian_elkhound", "81": "norwich_terrier", "82": "old_english_sheepdog", "83": "otterhound", "84": "papillon", "85": "pekinese", "86": "pembroke", "87": "pomeranian", "88": "pug", "89": "redbone", "90": "rhodesian_ridgeback", "91": "rottweiler", "92": "saint_bernard", "93": "saluki", "94": "samoyed", "95": "schipperke", "96": "scotch_terrier", "97": "scottish_deerhound", "98": "sealyham_terrier", "99": "shetland_sheepdog", "100": "shih-tzu", "101": "siberian_husky", "102": "silky_terrier", "103": "soft-coated_wheaten_terrier", "104": "staffordshire_bullterrier", "105": "standard_poodle", "106": "standard_schnauzer", "107": "sussex_spaniel", "108": "test", "109": "tibetan_mastiff", "110": "tibetan_terrier", "111": "toy_poodle", "112": "toy_terrier", "113": "vizsla", "114": "walker_hound", "115": "weimaraner", "116": "welsh_springer_spaniel", "117": "west_highland_white_terrier", "118": "whippet", "119": "wire-haired_fox_terrier", "120": "yorkshire_terrier"}}}}], "splits": [{"name": "train", "num_bytes": 373188614.424, "num_examples": 10222}, {"name": "test", "num_bytes": 343356807.568, "num_examples": 10357}], "download_size": 725142349, "dataset_size": 716545421.9920001}}
2024-01-04T08:17:30+00:00
[]
[]
TAGS #region-us
# Dataset Card for "dog_breed_classification" Dataset from Kaggle Competition Dog Breeds
[ "# Dataset Card for \"dog_breed_classification\"\n\nDataset from Kaggle Competition Dog Breeds" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"dog_breed_classification\"\n\nDataset from Kaggle Competition Dog Breeds" ]
[ 6, 24 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"dog_breed_classification\"\n\nDataset from Kaggle Competition Dog Breeds" ]
ae82c43efaf157d419ded52e18db3eaae2d9e55f
# capybara-sharegpt [LDJnr/Capybara](https://huggingface.co/datasets/LDJnr/Capybara) converted to ShareGPT format for use in common training repositories. Please refer to the original repository's dataset card for more information. All credit goes to the original creator.
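A hedged sketch of inspecting the converted data; it assumes the common ShareGPT layout of a `conversations` list with `from`/`value` turns, which the card does not spell out, so the column names should be verified first:

```python
from datasets import load_dataset

ds = load_dataset("Doctor-Shotgun/capybara-sharegpt", split="train")
print(ds.column_names)  # verify the actual column names before relying on them

# Assumed ShareGPT layout: {"conversations": [{"from": "human"/"gpt", "value": ...}, ...]}
for turn in ds[0].get("conversations", []):
    print(f'{turn["from"]}: {turn["value"][:80]}')
```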
Doctor-Shotgun/capybara-sharegpt
[ "task_categories:text-generation", "task_categories:conversational", "task_categories:question-answering", "language:en", "license:apache-2.0", "Physics", "Biology", "Math", "Chemistry", "Culture", "Logic", "Roleplay", "region:us" ]
2024-01-04T08:36:17+00:00
{"language": ["en"], "license": "apache-2.0", "task_categories": ["text-generation", "conversational", "question-answering"], "tags": ["Physics", "Biology", "Math", "Chemistry", "Culture", "Logic", "Roleplay"]}
2024-01-04T08:43:48+00:00
[]
[ "en" ]
TAGS #task_categories-text-generation #task_categories-conversational #task_categories-question-answering #language-English #license-apache-2.0 #Physics #Biology #Math #Chemistry #Culture #Logic #Roleplay #region-us
# capybara-sharegpt LDJnr/Capybara converted to ShareGPT format for use in common training repositories. Please refer to the original repository's dataset card for more information. All credit goes to the original creator.
[ "# capybara-sharegpt\n\nLDJnr/Capybara converted to ShareGPT format for use in common training repositories.\n\nPlease refer to the original repository's dataset card for more information. All credit goes to the original creator." ]
[ "TAGS\n#task_categories-text-generation #task_categories-conversational #task_categories-question-answering #language-English #license-apache-2.0 #Physics #Biology #Math #Chemistry #Culture #Logic #Roleplay #region-us \n", "# capybara-sharegpt\n\nLDJnr/Capybara converted to ShareGPT format for use in common training repositories.\n\nPlease refer to the original repository's dataset card for more information. All credit goes to the original creator." ]
[ 75, 56 ]
[ "passage: TAGS\n#task_categories-text-generation #task_categories-conversational #task_categories-question-answering #language-English #license-apache-2.0 #Physics #Biology #Math #Chemistry #Culture #Logic #Roleplay #region-us \n# capybara-sharegpt\n\nLDJnr/Capybara converted to ShareGPT format for use in common training repositories.\n\nPlease refer to the original repository's dataset card for more information. All credit goes to the original creator." ]
d4dfbb6e382fcebe5ef340c38f83fa753d47d7b0
# Speech Recognition Alignment Dataset

This dataset is a variation of several widely-used ASR datasets, encompassing Librispeech, MuST-C, TED-LIUM, VoxPopuli, Common Voice, and GigaSpeech. The difference is that this dataset includes:

- Precise alignment between audio and text.
- Text that has been punctuated and made case-sensitive.
- Identification of named entities in the text.

# Usage

First, install the latest version of the 🤗 Datasets package:

```bash
pip install --upgrade pip
pip install --upgrade datasets[audio]
```

The dataset can be downloaded and pre-processed on disk using the [`load_dataset`](https://huggingface.co/docs/datasets/v2.14.5/en/package_reference/loading_methods#datasets.load_dataset) function:

```python
from datasets import load_dataset

# Available configs: 'libris', 'mustc', 'tedlium', 'voxpopuli', 'commonvoice', 'gigaspeech'
dataset = load_dataset("nguyenvulebinh/asr-alignment", "libris")

# take the first sample of the training set
sample = dataset["train"][0]
```

It can also be streamed directly from the Hub using Datasets' [streaming mode](https://huggingface.co/blog/audio-datasets#streaming-mode-the-silver-bullet). Loading a dataset in streaming mode loads individual samples of the dataset at a time, rather than downloading the entire dataset to disk:

```python
from datasets import load_dataset

dataset = load_dataset("nguyenvulebinh/asr-alignment", "libris", streaming=True)

# take the first sample of the training set
sample = next(iter(dataset["train"]))
```

## Citation

If you use this data, please consider citing the [ICASSP 2024 Paper: SYNTHETIC CONVERSATIONS IMPROVE MULTI-TALKER ASR]():

```
@INPROCEEDINGS{synthetic-multi-asr-nguyen,
  author={Nguyen, Thai-Binh and Waibel, Alexander},
  booktitle={ICASSP 2024 - 2024 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP)},
  title={SYNTHETIC CONVERSATIONS IMPROVE MULTI-TALKER ASR},
  year={2024},
  volume={},
  number={},
}
```

## License

This dataset is licensed in accordance with the terms of the original dataset.
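Beyond the loading snippets in the card above, a minimal sketch (not part of the original card) of inspecting the word-level alignment and named-entity fields declared for each sample in the config further below; whether `entity_start`/`entity_end` index words or characters is not stated, so they are only printed here:

```python
from datasets import load_dataset

dataset = load_dataset("nguyenvulebinh/asr-alignment", "libris", streaming=True)
sample = next(iter(dataset["train"]))

# Word-level alignment: one (start, end) time pair per word.
for word, start, end in zip(sample["words"], sample["word_start"], sample["word_end"]):
    print(f"{start:7.2f} {end:7.2f}  {word}")

# Named-entity annotations; the span convention is not documented in the card.
for s, e, label in zip(sample["entity_start"], sample["entity_end"], sample["entity_label"]):
    print(label, s, e)
```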
nguyenvulebinh/asr-alignment
[ "size_categories:10M<n<100M", "language:en", "license:apache-2.0", "region:us" ]
2024-01-04T08:46:30+00:00
{"language": ["en"], "license": "apache-2.0", "size_categories": ["10M<n<100M"], "pretty_name": "Speech Recognition Alignment Dataset", "dataset_info": [{"config_name": "commonvoice", "features": [{"name": "id", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "audio", "dtype": {"audio": {"sampling_rate": 16000}}}, {"name": "words", "sequence": "string"}, {"name": "word_start", "sequence": "float64"}, {"name": "word_end", "sequence": "float64"}, {"name": "entity_start", "sequence": "int64"}, {"name": "entity_end", "sequence": "int64"}, {"name": "entity_label", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 43744079378.659, "num_examples": 948733}, {"name": "valid", "num_bytes": 722372503.994, "num_examples": 16353}], "download_size": 39798988113, "dataset_size": 44466451882.653}, {"config_name": "gigaspeech", "features": [{"name": "id", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "audio", "dtype": {"audio": {"sampling_rate": 16000}}}, {"name": "words", "sequence": "string"}, {"name": "word_start", "sequence": "float64"}, {"name": "word_end", "sequence": "float64"}, {"name": "entity_start", "sequence": "int64"}, {"name": "entity_end", "sequence": "int64"}, {"name": "entity_label", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 1032024261294.48, "num_examples": 8282987}, {"name": "valid", "num_bytes": 1340974408.04, "num_examples": 5715}], "download_size": 1148966064515, "dataset_size": 1033365235702.52}, {"config_name": "libris", "features": [{"name": "id", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "audio", "dtype": {"audio": {"sampling_rate": 16000}}}, {"name": "words", "sequence": "string"}, {"name": "word_start", "sequence": "float64"}, {"name": "word_end", "sequence": "float64"}, {"name": "entity_start", "sequence": "int64"}, {"name": "entity_end", "sequence": "int64"}, {"name": "entity_label", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 63849575890.896, "num_examples": 281241}, {"name": "valid", "num_bytes": 793442600.643, "num_examples": 5559}], "download_size": 61361142328, "dataset_size": 64643018491.539}, {"config_name": "mustc", "features": [{"name": "id", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "audio", "dtype": {"audio": {"sampling_rate": 16000}}}, {"name": "words", "sequence": "string"}, {"name": "word_start", "sequence": "float64"}, {"name": "word_end", "sequence": "float64"}, {"name": "entity_start", "sequence": "int64"}, {"name": "entity_end", "sequence": "int64"}, {"name": "entity_label", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 55552777413.1, "num_examples": 248612}, {"name": "valid", "num_bytes": 313397447.704, "num_examples": 1408}], "download_size": 52028374666, "dataset_size": 55866174860.804}, {"config_name": "tedlium", "features": [{"name": "id", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "audio", "dtype": {"audio": {"sampling_rate": 16000}}}, {"name": "words", "sequence": "string"}, {"name": "word_start", "sequence": "float64"}, {"name": "word_end", "sequence": "float64"}, {"name": "entity_start", "sequence": "int64"}, {"name": "entity_end", "sequence": "int64"}, {"name": "entity_label", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 56248950771.568, "num_examples": 268216}, {"name": "valid", "num_bytes": 321930549.928, "num_examples": 1456}], "download_size": 52557126451, "dataset_size": 56570881321.496}, {"config_name": "voxpopuli", "features": 
[{"name": "id", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "audio", "dtype": {"audio": {"sampling_rate": 16000}}}, {"name": "words", "sequence": "string"}, {"name": "word_start", "sequence": "float64"}, {"name": "word_end", "sequence": "float64"}, {"name": "entity_start", "sequence": "int64"}, {"name": "entity_end", "sequence": "int64"}, {"name": "entity_label", "sequence": "string"}], "splits": [{"name": "train", "num_bytes": 118516424284.524, "num_examples": 182463}, {"name": "valid", "num_bytes": 1144543020.808, "num_examples": 1842}], "download_size": 98669668241, "dataset_size": 119660967305.332}], "configs": [{"config_name": "commonvoice", "data_files": [{"split": "train", "path": "commonvoice/train-*"}, {"split": "valid", "path": "commonvoice/valid-*"}]}, {"config_name": "gigaspeech", "data_files": [{"split": "train", "path": "gigaspeech/train-*"}, {"split": "valid", "path": "gigaspeech/valid-*"}]}, {"config_name": "libris", "data_files": [{"split": "train", "path": "libris/train-*"}, {"split": "valid", "path": "libris/valid-*"}]}, {"config_name": "mustc", "data_files": [{"split": "train", "path": "mustc/train-*"}, {"split": "valid", "path": "mustc/valid-*"}]}, {"config_name": "tedlium", "data_files": [{"split": "train", "path": "tedlium/train-*"}, {"split": "valid", "path": "tedlium/valid-*"}]}, {"config_name": "voxpopuli", "data_files": [{"split": "train", "path": "voxpopuli/train-*"}, {"split": "valid", "path": "voxpopuli/valid-*"}]}]}
2024-01-08T08:48:13+00:00
[]
[ "en" ]
TAGS #size_categories-10M<n<100M #language-English #license-apache-2.0 #region-us
# Speech Recognition Alignment Dataset This dataset is a variation of several widely-used ASR datasets, encompassing Librispeech, MuST-C, TED-LIUM, VoxPopuli, Common Voice, and GigaSpeech. The difference is this dataset includes: - Precise alignment between audio and text. - Text that has been punctuated and made case-sensitive. - Identification of named entities in the text. # Usage First, install the latest version of the Datasets package: The dataset can be downloaded and pre-processed on disk using the 'load_dataset' function: It can also be streamed directly from the Hub using Datasets' streaming mode. Loading a dataset in streaming mode loads individual samples of the dataset at a time, rather than downloading the entire dataset to disk: If you use this data, please consider citing the [ICASSP 2024 Paper: SYNTHETIC CONVERSATIONS IMPROVE MULTI-TALKER ASR](): ## License This dataset is licensed in accordance with the terms of the original dataset.
[ "# Speech Recognition Alignment Dataset\n\nThis dataset is a variation of several widely-used ASR datasets, encompassing Librispeech, MuST-C, TED-LIUM, VoxPopuli, Common Voice, and GigaSpeech. The difference is this dataset includes:\n- Precise alignment between audio and text. \n- Text that has been punctuated and made case-sensitive.\n- Identification of named entities in the text.", "# Usage\n\nFirst, install the latest version of the Datasets package:\n\n\n\nThe dataset can be downloaded and pre-processed on disk using the 'load_dataset' \nfunction:\n\n\n\nIt can also be streamed directly from the Hub using Datasets' streaming mode.\nLoading a dataset in streaming mode loads individual samples of the dataset at a time, rather than downloading the entire \ndataset to disk:\n\n\n\nIf you use this data, please consider citing the [ICASSP 2024 Paper: SYNTHETIC CONVERSATIONS IMPROVE MULTI-TALKER ASR]():", "## License\n\nThis dataset is licensed in accordance with the terms of the original dataset." ]
[ "TAGS\n#size_categories-10M<n<100M #language-English #license-apache-2.0 #region-us \n", "# Speech Recognition Alignment Dataset\n\nThis dataset is a variation of several widely-used ASR datasets, encompassing Librispeech, MuST-C, TED-LIUM, VoxPopuli, Common Voice, and GigaSpeech. The difference is this dataset includes:\n- Precise alignment between audio and text. \n- Text that has been punctuated and made case-sensitive.\n- Identification of named entities in the text.", "# Usage\n\nFirst, install the latest version of the Datasets package:\n\n\n\nThe dataset can be downloaded and pre-processed on disk using the 'load_dataset' \nfunction:\n\n\n\nIt can also be streamed directly from the Hub using Datasets' streaming mode.\nLoading a dataset in streaming mode loads individual samples of the dataset at a time, rather than downloading the entire \ndataset to disk:\n\n\n\nIf you use this data, please consider citing the [ICASSP 2024 Paper: SYNTHETIC CONVERSATIONS IMPROVE MULTI-TALKER ASR]():", "## License\n\nThis dataset is licensed in accordance with the terms of the original dataset." ]
[ 30, 109, 128, 19 ]
[ "passage: TAGS\n#size_categories-10M<n<100M #language-English #license-apache-2.0 #region-us \n# Speech Recognition Alignment Dataset\n\nThis dataset is a variation of several widely-used ASR datasets, encompassing Librispeech, MuST-C, TED-LIUM, VoxPopuli, Common Voice, and GigaSpeech. The difference is this dataset includes:\n- Precise alignment between audio and text. \n- Text that has been punctuated and made case-sensitive.\n- Identification of named entities in the text.# Usage\n\nFirst, install the latest version of the Datasets package:\n\n\n\nThe dataset can be downloaded and pre-processed on disk using the 'load_dataset' \nfunction:\n\n\n\nIt can also be streamed directly from the Hub using Datasets' streaming mode.\nLoading a dataset in streaming mode loads individual samples of the dataset at a time, rather than downloading the entire \ndataset to disk:\n\n\n\nIf you use this data, please consider citing the [ICASSP 2024 Paper: SYNTHETIC CONVERSATIONS IMPROVE MULTI-TALKER ASR]():## License\n\nThis dataset is licensed in accordance with the terms of the original dataset." ]
196a17d5f41ee729fe82b7de7a51707e8569ff0c
# Dataset Card for Dataset Name <!-- Provide a quick summary of the dataset. --> ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. 
## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
bibekyess/layout-detector-flagged-samples
[ "region:us" ]
2024-01-04T08:49:33+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data.csv"}]}]}
2024-01-05T02:42:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for Dataset Name ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Dataset Name", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 8, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Dataset Name## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
ee20570ae7a29c51571e55a9a17983f7625295d6
Data downloaded from [WILDS](https://wilds.stanford.edu/) ([Download](https://wilds.stanford.edu/downloads), [paper](https://arxiv.org/abs/1812.01754), [project](https://ai.bu.edu/M3SDA/)). This dataset contains some copyrighted material whose use has not been specifically authorized by the copyright owners. In an effort to advance scientific research, we make this material available for academic research. We believe this constitutes a fair use of any such copyrighted material as provided for in section 107 of the US Copyright Law. In accordance with Title 17 U.S.C. Section 107, the material on this site is distributed without profit for non-commercial research and educational purposes. For more information on fair use please click [here](https://www.law.cornell.edu/uscode/text/17/107). If you wish to use copyrighted material on this site or in our dataset for purposes of your own that go beyond non-commercial research and academic purposes, you must obtain permission directly from the copyright owner. (adapted from the [official DomainNet website](https://ai.bu.edu/M3SDA/#refs))
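The card text above is a fair-use notice and does not show how the data is accessed. The following is a minimal sketch based only on what this record states elsewhere: the repository id `wltjr1007/DomainNet` given just below, and the `image` / `label` / `domain` / `image_path` features with six domain values listed in its metadata.

```python
from datasets import load_dataset

# Load the train split of the DomainNet copy referenced by this record.
ds = load_dataset("wltjr1007/DomainNet", split="train")

# `domain` is a ClassLabel with six values (clipart, infograph, painting,
# quickdraw, real, sketch); keep only the sketch-domain images.
sketch_id = ds.features["domain"].str2int("sketch")
sketch_only = ds.filter(lambda ex: ex["domain"] == sketch_id)

first = sketch_only[0]
print(first["image_path"], ds.features["label"].int2str(first["label"]))
```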
wltjr1007/DomainNet
[ "task_categories:image-classification", "task_categories:zero-shot-image-classification", "task_ids:multi-class-image-classification", "task_ids:multi-class-classification", "size_categories:100K<n<1M", "language:en", "license:other", "arxiv:1812.01754", "region:us" ]
2024-01-04T08:58:58+00:00
{"language": ["en"], "license": "other", "size_categories": ["100K<n<1M"], "task_categories": ["image-classification", "zero-shot-image-classification"], "task_ids": ["multi-class-image-classification", "multi-class-classification"], "pretty_name": "DomainNet", "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "aircraft_carrier", "1": "airplane", "2": "alarm_clock", "3": "ambulance", "4": "angel", "5": "animal_migration", "6": "ant", "7": "anvil", "8": "apple", "9": "arm", "10": "asparagus", "11": "axe", "12": "backpack", "13": "banana", "14": "bandage", "15": "barn", "16": "baseball", "17": "baseball_bat", "18": "basket", "19": "basketball", "20": "bat", "21": "bathtub", "22": "beach", "23": "bear", "24": "beard", "25": "bed", "26": "bee", "27": "belt", "28": "bench", "29": "bicycle", "30": "binoculars", "31": "bird", "32": "birthday_cake", "33": "blackberry", "34": "blueberry", "35": "book", "36": "boomerang", "37": "bottlecap", "38": "bowtie", "39": "bracelet", "40": "brain", "41": "bread", "42": "bridge", "43": "broccoli", "44": "broom", "45": "bucket", "46": "bulldozer", "47": "bus", "48": "bush", "49": "butterfly", "50": "cactus", "51": "cake", "52": "calculator", "53": "calendar", "54": "camel", "55": "camera", "56": "camouflage", "57": "campfire", "58": "candle", "59": "cannon", "60": "canoe", "61": "car", "62": "carrot", "63": "castle", "64": "cat", "65": "ceiling_fan", "66": "cello", "67": "cell_phone", "68": "chair", "69": "chandelier", "70": "church", "71": "circle", "72": "clarinet", "73": "clock", "74": "cloud", "75": "coffee_cup", "76": "compass", "77": "computer", "78": "cookie", "79": "cooler", "80": "couch", "81": "cow", "82": "crab", "83": "crayon", "84": "crocodile", "85": "crown", "86": "cruise_ship", "87": "cup", "88": "diamond", "89": "dishwasher", "90": "diving_board", "91": "dog", "92": "dolphin", "93": "donut", "94": "door", "95": "dragon", "96": "dresser", "97": "drill", "98": "drums", "99": "duck", "100": "dumbbell", "101": "ear", "102": "elbow", "103": "elephant", "104": "envelope", "105": "eraser", "106": "eye", "107": "eyeglasses", "108": "face", "109": "fan", "110": "feather", "111": "fence", "112": "finger", "113": "fire_hydrant", "114": "fireplace", "115": "firetruck", "116": "fish", "117": "flamingo", "118": "flashlight", "119": "flip_flops", "120": "floor_lamp", "121": "flower", "122": "flying_saucer", "123": "foot", "124": "fork", "125": "frog", "126": "frying_pan", "127": "garden", "128": "garden_hose", "129": "giraffe", "130": "goatee", "131": "golf_club", "132": "grapes", "133": "grass", "134": "guitar", "135": "hamburger", "136": "hammer", "137": "hand", "138": "harp", "139": "hat", "140": "headphones", "141": "hedgehog", "142": "helicopter", "143": "helmet", "144": "hexagon", "145": "hockey_puck", "146": "hockey_stick", "147": "horse", "148": "hospital", "149": "hot_air_balloon", "150": "hot_dog", "151": "hot_tub", "152": "hourglass", "153": "house", "154": "house_plant", "155": "hurricane", "156": "ice_cream", "157": "jacket", "158": "jail", "159": "kangaroo", "160": "key", "161": "keyboard", "162": "knee", "163": "knife", "164": "ladder", "165": "lantern", "166": "laptop", "167": "leaf", "168": "leg", "169": "light_bulb", "170": "lighter", "171": "lighthouse", "172": "lightning", "173": "line", "174": "lion", "175": "lipstick", "176": "lobster", "177": "lollipop", "178": "mailbox", "179": "map", "180": "marker", "181": "matches", "182": "megaphone", "183": "mermaid", 
"184": "microphone", "185": "microwave", "186": "monkey", "187": "moon", "188": "mosquito", "189": "motorbike", "190": "mountain", "191": "mouse", "192": "moustache", "193": "mouth", "194": "mug", "195": "mushroom", "196": "nail", "197": "necklace", "198": "nose", "199": "ocean", "200": "octagon", "201": "octopus", "202": "onion", "203": "oven", "204": "owl", "205": "paintbrush", "206": "paint_can", "207": "palm_tree", "208": "panda", "209": "pants", "210": "paper_clip", "211": "parachute", "212": "parrot", "213": "passport", "214": "peanut", "215": "pear", "216": "peas", "217": "pencil", "218": "penguin", "219": "piano", "220": "pickup_truck", "221": "picture_frame", "222": "pig", "223": "pillow", "224": "pineapple", "225": "pizza", "226": "pliers", "227": "police_car", "228": "pond", "229": "pool", "230": "popsicle", "231": "postcard", "232": "potato", "233": "power_outlet", "234": "purse", "235": "rabbit", "236": "raccoon", "237": "radio", "238": "rain", "239": "rainbow", "240": "rake", "241": "remote_control", "242": "rhinoceros", "243": "rifle", "244": "river", "245": "roller_coaster", "246": "rollerskates", "247": "sailboat", "248": "sandwich", "249": "saw", "250": "saxophone", "251": "school_bus", "252": "scissors", "253": "scorpion", "254": "screwdriver", "255": "sea_turtle", "256": "see_saw", "257": "shark", "258": "sheep", "259": "shoe", "260": "shorts", "261": "shovel", "262": "sink", "263": "skateboard", "264": "skull", "265": "skyscraper", "266": "sleeping_bag", "267": "smiley_face", "268": "snail", "269": "snake", "270": "snorkel", "271": "snowflake", "272": "snowman", "273": "soccer_ball", "274": "sock", "275": "speedboat", "276": "spider", "277": "spoon", "278": "spreadsheet", "279": "square", "280": "squiggle", "281": "squirrel", "282": "stairs", "283": "star", "284": "steak", "285": "stereo", "286": "stethoscope", "287": "stitches", "288": "stop_sign", "289": "stove", "290": "strawberry", "291": "streetlight", "292": "string_bean", "293": "submarine", "294": "suitcase", "295": "sun", "296": "swan", "297": "sweater", "298": "swing_set", "299": "sword", "300": "syringe", "301": "table", "302": "teapot", "303": "teddy-bear", "304": "telephone", "305": "television", "306": "tennis_racquet", "307": "tent", "308": "The_Eiffel_Tower", "309": "The_Great_Wall_of_China", "310": "The_Mona_Lisa", "311": "tiger", "312": "toaster", "313": "toe", "314": "toilet", "315": "tooth", "316": "toothbrush", "317": "toothpaste", "318": "tornado", "319": "tractor", "320": "traffic_light", "321": "train", "322": "tree", "323": "triangle", "324": "trombone", "325": "truck", "326": "trumpet", "327": "t-shirt", "328": "umbrella", "329": "underwear", "330": "van", "331": "vase", "332": "violin", "333": "washing_machine", "334": "watermelon", "335": "waterslide", "336": "whale", "337": "wheel", "338": "windmill", "339": "wine_bottle", "340": "wine_glass", "341": "wristwatch", "342": "yoga", "343": "zebra", "344": "zigzag"}}}}, {"name": "domain", "dtype": {"class_label": {"names": {"0": "clipart", "1": "infograph", "2": "painting", "3": "quickdraw", "4": "real", "5": "sketch"}}}}, {"name": "image_path", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 1098474093.3600001, "num_examples": 409832}, {"name": "test", "num_bytes": 471724034.589, "num_examples": 176743}], "download_size": 18521436207, "dataset_size": 1570198127.9490001}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}, {"split": "test", "path": "data/test-*"}]}]}
2024-01-11T07:57:51+00:00
[ "1812.01754" ]
[ "en" ]
TAGS #task_categories-image-classification #task_categories-zero-shot-image-classification #task_ids-multi-class-image-classification #task_ids-multi-class-classification #size_categories-100K<n<1M #language-English #license-other #arxiv-1812.01754 #region-us
Data downloaded from WILDS (Download, paper, project). This dataset contains some copyrighted material whose use has not been specifically authorized by the copyright owners. In an effort to advance scientific research, we make this material available for academic research. We believe this constitutes a fair use of any such copyrighted material as provided for in section 107 of the US Copyright Law. In accordance with Title 17 U.S.C. Section 107, the material on this site is distributed without profit for non-commercial research and educational purposes. For more information on fair use please click here. If you wish to use copyrighted material on this site or in our dataset for purposes of your own that go beyond non-commercial research and academic purposes, you must obtain permission directly from the copyright owner. (adapted from the official DomainNet website)
[]
[ "TAGS\n#task_categories-image-classification #task_categories-zero-shot-image-classification #task_ids-multi-class-image-classification #task_ids-multi-class-classification #size_categories-100K<n<1M #language-English #license-other #arxiv-1812.01754 #region-us \n" ]
[ 88 ]
[ "passage: TAGS\n#task_categories-image-classification #task_categories-zero-shot-image-classification #task_ids-multi-class-image-classification #task_ids-multi-class-classification #size_categories-100K<n<1M #language-English #license-other #arxiv-1812.01754 #region-us \n" ]
eac0131e730683c751dd63d4d0f53a3a80f55ff3
# Dataset Card for Dataset Name <!-- Provide a quick summary of the dataset. --> ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. 
## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
neoALI/layout-detector-flagged-samples
[ "region:us" ]
2024-01-04T09:06:55+00:00
{"configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data.csv"}]}]}
2024-02-05T07:42:15+00:00
[]
[]
TAGS #region-us
# Dataset Card for Dataset Name ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Dataset Name", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 8, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Dataset Name## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
03beb37563ab466cece6cc56efc04aea62a973a3
# Dataset Card for "cats_vs_dogs_classification" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
ajinkyakolhe112/cats_vs_dogs_classification_kaggle
[ "region:us" ]
2024-01-04T09:08:54+00:00
{"dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "label", "dtype": {"class_label": {"names": {"0": "cat", "1": "dog", "2": "test"}}}}], "splits": [{"name": "train", "num_bytes": 525901830.0, "num_examples": 25000}, {"name": "test", "num_bytes": 287220682.0, "num_examples": 12500}], "download_size": 857484612, "dataset_size": 813122512.0}}
2024-01-04T09:10:56+00:00
[]
[]
TAGS #region-us
# Dataset Card for "cats_vs_dogs_classification" More Information needed
[ "# Dataset Card for \"cats_vs_dogs_classification\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"cats_vs_dogs_classification\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"cats_vs_dogs_classification\"\n\nMore Information needed" ]
d8bcea3f1d275a57086e5c0ea60a857a408141f4
# Dataset Card for WebSight ## Dataset Version Alpha version, v0.1. ## Dataset Description This dataset consists of 823,000 HTML/CSS codes representing synthetically generated English websites, each accompanied by a corresponding screenshot (rendered with [Playwright](https://github.com/microsoft/playwright)). This dataset serves as a valuable resource for tasks such as generating UI codes from a screenshot. Notably, it is used for the training of the forthcoming Idefics-2 model, aiming at augmenting its proficiency in generating website code based on a provided screenshot. ## Data Fields An example of a sample appears as follows: ``` { 'images': PIL.Image, 'text': '<html>\n<style>\n{css}</style>\n{body}\n</html>', } ``` where `css` is the CSS code, and `body` is the body of the HTML code. In other words, the CSS code is embedded directly within the HTML code, facilitating the straightforward training of a model. ## Data Splits There is only one split, `train`, that contains 822,987 images and codes. ## Dataset Creation This dataset was created using [Mistral-7B-v0.1](https://huggingface.co/mistralai/Mistral-7B-v0.1) to generate random website ideas with the prompt ``` Generate diverse website layout ideas for different companies, each with a unique design element. Examples include: a car company site with a left column, a webpage footer with a centered logo. Explore variations in colors, positions, and company fields. Don't give any explanations or recognition that you have understood the request, just give the list of 10 ideas, with a line break between each. ``` which were then passed to [Deepseek-Coder-33b-Instruct](https://huggingface.co/deepseek-ai/deepseek-coder-33b-instruct) with the prompt ``` Create a very SIMPLE and SHORT website with the following elements: {idea} Be creative with the design, size, position of the elements, columns, etc... Don't give any explanation, just the content of the HTML code `index.html` starting with `<!DOCTYPE html>`, followed by the CSS code `styles.css` starting with `/* Global Styles */`. Write real and short sentences for the paragraphs, don't use Lorem ipsum. When you want to display an image, don't use <img> in the HTML, always display a colored rectangle instead. ``` Following these steps, the HTML and CSS codes were extracted from the outputs of Deepseek-Coder and formatted into the structure `'<html>\n<style>\n{css}</style>\n{body}\n</html>'`. ## Terms of Use By using the dataset, you agree to comply with the original licenses of the source content as well as the dataset license (CC-BY-4.0). Additionally, if you use this dataset to train a Machine Learning model, you agree to disclose your use of the dataset when releasing the model or an ML application using the model. ### Licensing Information License CC-BY-4.0.
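The card above states that each sample pairs a screenshot with a single `text` string in which the CSS is embedded inside the HTML (`'<html>\n<style>\n{css}</style>\n{body}\n</html>'`). Below is a minimal sketch of loading one sample and splitting it back into CSS and body; the split logic assumes exactly that template. Note that the card's sample names the image key `images`, while the metadata later in this record lists the feature as `image`; the sketch follows the metadata.

```python
from datasets import load_dataset

# Stream a single sample so the full ~31 GB of parquet files is not downloaded.
ds = load_dataset("HuggingFaceM4/WebSight", split="train", streaming=True)
sample = next(iter(ds))

page_source = sample["text"]   # '<html>\n<style>\n{css}</style>\n{body}\n</html>'
screenshot = sample["image"]   # PIL image of the rendered page

# Recover the CSS and the HTML body, assuming exactly the template quoted above.
css = page_source.split("<style>\n", 1)[1].split("</style>", 1)[0]
body = page_source.split("</style>\n", 1)[1].rsplit("\n</html>", 1)[0]

print(css[:120])
print(body[:120])
```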
HuggingFaceM4/WebSight
[ "size_categories:100K<n<1M", "language:en", "license:cc-by-4.0", "code", "synthetic", "region:us" ]
2024-01-04T10:08:47+00:00
{"language": ["en"], "license": "cc-by-4.0", "size_categories": ["100K<n<1M"], "pretty_name": "WebSigh", "dataset_info": {"features": [{"name": "image", "dtype": "image"}, {"name": "text", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 35386660486.65, "num_examples": 822987}], "download_size": 31394170440, "dataset_size": 35386660486.65}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "tags": ["code", "synthetic"]}
2024-02-02T15:12:06+00:00
[]
[ "en" ]
TAGS #size_categories-100K<n<1M #language-English #license-cc-by-4.0 #code #synthetic #region-us
# Dataset Card for WebSight ## Dataset Version Alpha version, v0.1. ## Dataset Description This dataset consists of 823,000 HTML/CSS codes representing synthetically generated English websites, each accompanied by a corresponding screenshot (rendered with Playwright). This dataset serves as a valuable resource for tasks such as generating UI codes from a screenshot. Notably, it is used for the training of the forthcoming Idefics-2 model, aiming at augmenting its proficiency in generating website code based on a provided screenshot. ## Data Fields An example of a sample appears as follows: where 'css' is the CSS code, and 'body' is the body of the HTML code. In other words, the CSS code is embedded directly within the HTML code, facilitating the straightforward training of a model. ## Data Splits There is only one split, 'train', that contains 822,987 images and codes. ## Dataset Creation This dataset was created using Mistral-7B-v0.1 to generate random website ideas with the prompt which were then passed to Deepseek-Coder-33b-Instruct with the prompt Following these steps, the HTML and CSS codes were extracted from the outputs of Deepseek-Coder and formatted into the structure ''<html>\n<style>\n{css}</style>\n{body}\n</html>''. ## Terms of Use By using the dataset, you agree to comply with the original licenses of the source content as well as the dataset license (CC-BY-4.0). Additionally, if you use this dataset to train a Machine Learning model, you agree to disclose your use of the dataset when releasing the model or an ML application using the model. ### Licensing Information License CC-BY-4.0.
[ "# Dataset Card for WebSight", "## Dataset Version\n\nAlpha version, v0.1.", "## Dataset Description\n\nThis dataset consists of 823,000 HTML/CSS codes representing synthetically generated English websites, each accompanied by a corresponding screenshot (rendered with Playwright).\n\nThis dataset serves as a valuable resource for tasks such as generating UI codes from a screenshot. Notably, it is used for the training of the forthcoming Idefics-2 model, aiming at augmenting its proficiency in generating website code based on a provided screenshot.", "## Data Fields\n\nAn example of a sample appears as follows:\n\nwhere 'css' is the CSS code, and 'body' is the body of the HTML code.\nIn other words, the CSS code is embedded directly within the HTML code, facilitating the straightforward training of a model.", "## Data Splits\n\nThere is only one split, 'train', that contains 822,987 images and codes.", "## Dataset Creation\n\nThis dataset was created using Mistral-7B-v0.1 to generate random website ideas with the prompt\n\nwhich were then passed to Deepseek-Coder-33b-Instruct with the prompt\n\nFollowing these steps, the HTML and CSS codes were extracted from the outputs of Deepseek-Coder and formatted into the structure ''<html>\\n<style>\\n{css}</style>\\n{body}\\n</html>''.", "## Terms of Use\n\nBy using the dataset, you agree to comply with the original licenses of the source content as well as the dataset license (CC-BY-4.0). Additionally, if you use this dataset to train a Machine Learning model, you agree to disclose your use of the dataset when releasing the model or an ML application using the model.", "### Licensing Information\n\nLicense CC-BY-4.0." ]
[ "TAGS\n#size_categories-100K<n<1M #language-English #license-cc-by-4.0 #code #synthetic #region-us \n", "# Dataset Card for WebSight", "## Dataset Version\n\nAlpha version, v0.1.", "## Dataset Description\n\nThis dataset consists of 823,000 HTML/CSS codes representing synthetically generated English websites, each accompanied by a corresponding screenshot (rendered with Playwright).\n\nThis dataset serves as a valuable resource for tasks such as generating UI codes from a screenshot. Notably, it is used for the training of the forthcoming Idefics-2 model, aiming at augmenting its proficiency in generating website code based on a provided screenshot.", "## Data Fields\n\nAn example of a sample appears as follows:\n\nwhere 'css' is the CSS code, and 'body' is the body of the HTML code.\nIn other words, the CSS code is embedded directly within the HTML code, facilitating the straightforward training of a model.", "## Data Splits\n\nThere is only one split, 'train', that contains 822,987 images and codes.", "## Dataset Creation\n\nThis dataset was created using Mistral-7B-v0.1 to generate random website ideas with the prompt\n\nwhich were then passed to Deepseek-Coder-33b-Instruct with the prompt\n\nFollowing these steps, the HTML and CSS codes were extracted from the outputs of Deepseek-Coder and formatted into the structure ''<html>\\n<style>\\n{css}</style>\\n{body}\\n</html>''.", "## Terms of Use\n\nBy using the dataset, you agree to comply with the original licenses of the source content as well as the dataset license (CC-BY-4.0). Additionally, if you use this dataset to train a Machine Learning model, you agree to disclose your use of the dataset when releasing the model or an ML application using the model.", "### Licensing Information\n\nLicense CC-BY-4.0." ]
[ 37, 8, 10, 110, 64, 27, 106, 82, 13 ]
[ "passage: TAGS\n#size_categories-100K<n<1M #language-English #license-cc-by-4.0 #code #synthetic #region-us \n# Dataset Card for WebSight## Dataset Version\n\nAlpha version, v0.1.## Dataset Description\n\nThis dataset consists of 823,000 HTML/CSS codes representing synthetically generated English websites, each accompanied by a corresponding screenshot (rendered with Playwright).\n\nThis dataset serves as a valuable resource for tasks such as generating UI codes from a screenshot. Notably, it is used for the training of the forthcoming Idefics-2 model, aiming at augmenting its proficiency in generating website code based on a provided screenshot.## Data Fields\n\nAn example of a sample appears as follows:\n\nwhere 'css' is the CSS code, and 'body' is the body of the HTML code.\nIn other words, the CSS code is embedded directly within the HTML code, facilitating the straightforward training of a model.## Data Splits\n\nThere is only one split, 'train', that contains 822,987 images and codes.## Dataset Creation\n\nThis dataset was created using Mistral-7B-v0.1 to generate random website ideas with the prompt\n\nwhich were then passed to Deepseek-Coder-33b-Instruct with the prompt\n\nFollowing these steps, the HTML and CSS codes were extracted from the outputs of Deepseek-Coder and formatted into the structure ''<html>\\n<style>\\n{css}</style>\\n{body}\\n</html>''.## Terms of Use\n\nBy using the dataset, you agree to comply with the original licenses of the source content as well as the dataset license (CC-BY-4.0). Additionally, if you use this dataset to train a Machine Learning model, you agree to disclose your use of the dataset when releasing the model or an ML application using the model.### Licensing Information\n\nLicense CC-BY-4.0." ]
366dcdb214f25194e28df46da01911fdef8f7d5f
# Dataset Card for VideoXum ## Table of Contents - [Table of Contents](#table-of-contents) - [Dataset Description](#dataset-description) - [Dataset Summary](#dataset-summary) - [Languages](#languages) - [Dataset Structure](#dataset-structure) - [Data Splits](#data-splits) - [Data Resources](#data-resources) - [Data Fields](#data-fields) - [Annotation Sample](#annotation-sample) - [Citation](#citation) ## Dataset Description - **Homepage:** https://videoxum.github.io/ - **Paper:** https://arxiv.org/abs/2303.12060 ### Dataset Summary The VideoXum dataset represents a novel task in the field of video summarization, extending the scope from single-modal to cross-modal video summarization. This new task focuses on creating video summaries that contain both visual and textual elements with semantic coherence. Built upon the foundation of ActivityNet Captions, VideoXum is a large-scale dataset, including over 14,000 long-duration and open-domain videos. Each video is paired with 10 corresponding video summaries, amounting to a total of 140,000 video-text summary pairs. ### Languages The textual summarizations in the dataset are in English. ## Dataset Structure ### Dataset Splits | |train |validation| test | Overall | |-------------|------:|---------:|------:|--------:| | # of videos | 8,000 | 2,001 | 4,000 | 14,001 | ### Dataset Resources - `train_videoxum.json`: annotations of training set - `val_videoxum.json`: annotations of validation set - `test_videoxum.json`: annotations of test set ### Dataset Fields - `video_id`: `str` a unique identifier for the video. - `duration`: `float` total duration of the video in seconds. - `sampled_frames`: `int` the number of frames sampled from source video at 1 fps with a uniform sampling schema. - `timestamps`: `List_float` a list of timestamp pairs, with each pair representing the start and end times of a segment within the video. - `tsum`: `List_str` each textual video summary provides a summarization of the corresponding video segment as defined by the timestamps. - `vsum`: `List_float` each visual video summary corresponds to key frames within each video segment as defined by the timestamps. The dimensions (3 x 10) suggest that each video segment was reannotated by 10 different workers. - `vsum_onehot`: `List_bool` one-hot matrix transformed from 'vsum'. The dimensions (10 x 83) denote the one-hot labels spanning the entire length of a video, as annotated by 10 workers. ### Annotation Sample For each video, we hire workers to annotate ten shortened video summaries. ``` json { 'video_id': 'v_QOlSCBRmfWY', 'duration': 82.73, 'sampled_frames': 83, 'timestamps': [[0.83, 19.86], [17.37, 60.81], [56.26, 79.42]], 'tsum': ['A young woman is seen standing in a room and leads into her dancing.', 'The girl dances around the room while the camera captures her movements.', 'She continues dancing around the room and ends by laying on the floor.'], 'vsum': [[[ 7.01, 12.37], ...], [[41.05, 45.04], ...], [[65.74, 69.28], ...]] (3 x 10 dim) 'vsum_onehot': [[[0,0,0,...,1,1,...], ...], [[0,0,0,...,1,1,...], ...], [[0,0,0,...,1,1,...], ...],] (10 x 83 dim) } ``` ## Citation ```bibtex @article{lin2023videoxum, author = {Lin, Jingyang and Hua, Hang and Chen, Ming and Li, Yikang and Hsiao, Jenhao and Ho, Chiuman and Luo, Jiebo}, title = {VideoXum: Cross-modal Visual and Textural Summarization of Videos}, journal = {IEEE Transactions on Multimedia}, year = {2023}, } ```
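The card above specifies the annotation files (`train/val/test_videoxum.json`), the per-video fields, `vsum` spans given in seconds, and a 10 x N `vsum_onehot` matrix over frames sampled at 1 fps. The sketch below shows, under those stated conventions, one way to read an annotation file and rebuild a per-frame mask from a single worker's `vsum` spans; the file layout (a JSON list) and the rounding from seconds to frame indices are assumptions, not the authors' exact procedure.

```python
import json
import numpy as np

# Path is assumed; the card names train/val/test_videoxum.json as the annotation files.
with open("val_videoxum.json") as f:
    annotations = json.load(f)

video = annotations[0]                 # assumes the file is a list of entries like the sample above
n_frames = video["sampled_frames"]     # frames sampled from the video at 1 fps

# Rebuild a per-frame 0/1 label for worker 0 from the [start, end] spans in `vsum`.
mask = np.zeros(n_frames, dtype=np.int64)
for segment_spans in video["vsum"]:    # one list of 10 worker spans per video segment
    start, end = segment_spans[0]      # worker 0's span for this segment, in seconds
    mask[int(round(start)) : int(round(end)) + 1] = 1

print(video["video_id"], int(mask.sum()), "of", n_frames, "frames selected by worker 0")
```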
jylins/videoxum
[ "task_categories:summarization", "size_categories:10K<n<100K", "language:en", "license:apache-2.0", "cross-modal-video-summarization", "video-summarization", "video-captioning", "arxiv:2303.12060", "region:us" ]
2024-01-04T10:10:15+00:00
{"language": ["en"], "license": "apache-2.0", "size_categories": ["10K<n<100K"], "task_categories": ["summarization"], "pretty_name": "VideoXum", "tags": ["cross-modal-video-summarization", "video-summarization", "video-captioning"]}
2024-01-24T06:11:43+00:00
[ "2303.12060" ]
[ "en" ]
TAGS #task_categories-summarization #size_categories-10K<n<100K #language-English #license-apache-2.0 #cross-modal-video-summarization #video-summarization #video-captioning #arxiv-2303.12060 #region-us
Dataset Card for VideoXum ========================= Table of Contents ----------------- * Table of Contents * Dataset Description + Dataset Summary + Languages * Dataset Structure + Data Splits + Data Resources + Data Fields + Annotation Sample * Citation Dataset Description ------------------- * Homepage: URL * Paper: URL ### Dataset Summary The VideoXum dataset represents a novel task in the field of video summarization, extending the scope from single-modal to cross-modal video summarization. This new task focuses on creating video summaries that containing both visual and textual elements with semantic coherence. Built upon the foundation of ActivityNet Captions, VideoXum is a large-scale dataset, including over 14,000 long-duration and open-domain videos. Each video is paired with 10 corresponding video summaries, amounting to a total of 140,000 video-text summary pairs. ### Languages The textual summarization in the dataset are in English. Dataset Structure ----------------- ### Dataset Splits ### Dataset Resources * 'train\_videoxum.json': annotations of training set * 'val\_videoxum.json': annotations of validation set * 'test\_videoxum.json': annotations of test set ### Dataset Fields * 'video\_id': 'str' a unique identifier for the video. * 'duration': 'float' total duration of the video in seconds. * 'sampled\_frames': 'int' the number of frames sampled from source video at 1 fps with a uniform sampling schema. * 'timestamps': 'List\_float' a list of timestamp pairs, with each pair representing the start and end times of a segment within the video. * 'tsum': 'List\_str' each textual video summary provides a summarization of the corresponding video segment as defined by the timestamps. * 'vsum': 'List\_float' each visual video summary corresponds to key frames within each video segment as defined by the timestamps. The dimensions (3 x 10) suggest that each video segment was reannotated by 10 different workers. * 'vsum\_onehot': 'List\_bool' one-hot matrix transformed from 'vsum'. The dimensions (10 x 83) denotes the one-hot labels spanning the entire length of a video, as annotated by 10 workers. ### Annotation Sample For each video, We hire workers to annotate ten shortened video summaries.
[ "### Dataset Summary\n\n\nThe VideoXum dataset represents a novel task in the field of video summarization, extending the scope from single-modal to cross-modal video summarization. This new task focuses on creating video summaries that containing both visual and textual elements with semantic coherence. Built upon the foundation of ActivityNet Captions, VideoXum is a large-scale dataset, including over 14,000 long-duration and open-domain videos. Each video is paired with 10 corresponding video summaries, amounting to a total of 140,000 video-text summary pairs.", "### Languages\n\n\nThe textual summarization in the dataset are in English.\n\n\nDataset Structure\n-----------------", "### Dataset Splits", "### Dataset Resources\n\n\n* 'train\\_videoxum.json': annotations of training set\n* 'val\\_videoxum.json': annotations of validation set\n* 'test\\_videoxum.json': annotations of test set", "### Dataset Fields\n\n\n* 'video\\_id': 'str' a unique identifier for the video.\n* 'duration': 'float' total duration of the video in seconds.\n* 'sampled\\_frames': 'int' the number of frames sampled from source video at 1 fps with a uniform sampling schema.\n* 'timestamps': 'List\\_float' a list of timestamp pairs, with each pair representing the start and end times of a segment within the video.\n* 'tsum': 'List\\_str' each textual video summary provides a summarization of the corresponding video segment as defined by the timestamps.\n* 'vsum': 'List\\_float' each visual video summary corresponds to key frames within each video segment as defined by the timestamps. The dimensions (3 x 10) suggest that each video segment was reannotated by 10 different workers.\n* 'vsum\\_onehot': 'List\\_bool' one-hot matrix transformed from 'vsum'. The dimensions (10 x 83) denotes the one-hot labels spanning the entire length of a video, as annotated by 10 workers.", "### Annotation Sample\n\n\nFor each video, We hire workers to annotate ten shortened video summaries." ]
[ "TAGS\n#task_categories-summarization #size_categories-10K<n<100K #language-English #license-apache-2.0 #cross-modal-video-summarization #video-summarization #video-captioning #arxiv-2303.12060 #region-us \n", "### Dataset Summary\n\n\nThe VideoXum dataset represents a novel task in the field of video summarization, extending the scope from single-modal to cross-modal video summarization. This new task focuses on creating video summaries that containing both visual and textual elements with semantic coherence. Built upon the foundation of ActivityNet Captions, VideoXum is a large-scale dataset, including over 14,000 long-duration and open-domain videos. Each video is paired with 10 corresponding video summaries, amounting to a total of 140,000 video-text summary pairs.", "### Languages\n\n\nThe textual summarization in the dataset are in English.\n\n\nDataset Structure\n-----------------", "### Dataset Splits", "### Dataset Resources\n\n\n* 'train\\_videoxum.json': annotations of training set\n* 'val\\_videoxum.json': annotations of validation set\n* 'test\\_videoxum.json': annotations of test set", "### Dataset Fields\n\n\n* 'video\\_id': 'str' a unique identifier for the video.\n* 'duration': 'float' total duration of the video in seconds.\n* 'sampled\\_frames': 'int' the number of frames sampled from source video at 1 fps with a uniform sampling schema.\n* 'timestamps': 'List\\_float' a list of timestamp pairs, with each pair representing the start and end times of a segment within the video.\n* 'tsum': 'List\\_str' each textual video summary provides a summarization of the corresponding video segment as defined by the timestamps.\n* 'vsum': 'List\\_float' each visual video summary corresponds to key frames within each video segment as defined by the timestamps. The dimensions (3 x 10) suggest that each video segment was reannotated by 10 different workers.\n* 'vsum\\_onehot': 'List\\_bool' one-hot matrix transformed from 'vsum'. The dimensions (10 x 83) denotes the one-hot labels spanning the entire length of a video, as annotated by 10 workers.", "### Annotation Sample\n\n\nFor each video, We hire workers to annotate ten shortened video summaries." ]
[ 71, 139, 25, 6, 65, 275, 25 ]
[ "passage: TAGS\n#task_categories-summarization #size_categories-10K<n<100K #language-English #license-apache-2.0 #cross-modal-video-summarization #video-summarization #video-captioning #arxiv-2303.12060 #region-us \n### Dataset Summary\n\n\nThe VideoXum dataset represents a novel task in the field of video summarization, extending the scope from single-modal to cross-modal video summarization. This new task focuses on creating video summaries that containing both visual and textual elements with semantic coherence. Built upon the foundation of ActivityNet Captions, VideoXum is a large-scale dataset, including over 14,000 long-duration and open-domain videos. Each video is paired with 10 corresponding video summaries, amounting to a total of 140,000 video-text summary pairs.### Languages\n\n\nThe textual summarization in the dataset are in English.\n\n\nDataset Structure\n-----------------### Dataset Splits### Dataset Resources\n\n\n* 'train\\_videoxum.json': annotations of training set\n* 'val\\_videoxum.json': annotations of validation set\n* 'test\\_videoxum.json': annotations of test set" ]
b67ecc611d5ecb38602353ece518d27d80904c17
## Description Videos made using models trained on Public Domain content. ## Model SVD ## Voice Muted ## Orientation Landscape # Tags - Public Domain # Style 1928 animation movie, movie still # Music 1920 piano ragtime ## Prompt A channel generating short animated video of stories in the public domain, between 2 to 3 minutes Videos are humoristic, like in Charle Chaplin movies. They include tons of funny scenes and jokes about various aspects of the american life. The american dream, household animals doing funny things, people going on their daily life, working, having fun etc
jbilcke-hf/ai-tube-public-domain
[ "license:cc-by-nc-4.0", "region:us" ]
2024-01-04T10:49:26+00:00
{"license": "cc-by-nc-4.0", "pretty_name": "Public Domain"}
2024-01-04T14:49:38+00:00
[]
[]
TAGS #license-cc-by-nc-4.0 #region-us
## Description Videos made using models trained on Public Domain content. ## Model SVD ## Voice Muted ## Orientation Landscape # Tags - Public Domain # Style 1928 animation movie, movie still # Music 1920 piano ragtime ## Prompt A channel generating short animated video of stories in the public domain, between 2 to 3 minutes Videos are humoristic, like in Charle Chaplin movies. They include tons of funny scenes and jokes about various aspects of the american life. The american dream, household animals doing funny things, people going on their daily life, working, having fun etc
[ "## Description\n\nVideos made using models trained on Public Domain content.", "## Model\n\nSVD", "## Voice\n\nMuted", "## Orientation\n\nLandscape", "# Tags\n\n- Public Domain", "# Style\n\n1928 animation movie, movie still", "# Music\n\n1920 piano ragtime", "## Prompt\n\nA channel generating short animated video of stories in the public domain, between 2 to 3 minutes\n\nVideos are humoristic, like in Charle Chaplin movies.\nThey include tons of funny scenes and jokes about various aspects of the american life.\nThe american dream, household animals doing funny things, people going on their daily life,\nworking, having fun etc" ]
[ "TAGS\n#license-cc-by-nc-4.0 #region-us \n", "## Description\n\nVideos made using models trained on Public Domain content.", "## Model\n\nSVD", "## Voice\n\nMuted", "## Orientation\n\nLandscape", "# Tags\n\n- Public Domain", "# Style\n\n1928 animation movie, movie still", "# Music\n\n1920 piano ragtime", "## Prompt\n\nA channel generating short animated video of stories in the public domain, between 2 to 3 minutes\n\nVideos are humoristic, like in Charle Chaplin movies.\nThey include tons of funny scenes and jokes about various aspects of the american life.\nThe american dream, household animals doing funny things, people going on their daily life,\nworking, having fun etc" ]
[ 17, 13, 4, 4, 5, 5, 9, 6, 77 ]
[ "passage: TAGS\n#license-cc-by-nc-4.0 #region-us \n## Description\n\nVideos made using models trained on Public Domain content.## Model\n\nSVD## Voice\n\nMuted## Orientation\n\nLandscape# Tags\n\n- Public Domain# Style\n\n1928 animation movie, movie still# Music\n\n1920 piano ragtime## Prompt\n\nA channel generating short animated video of stories in the public domain, between 2 to 3 minutes\n\nVideos are humoristic, like in Charle Chaplin movies.\nThey include tons of funny scenes and jokes about various aspects of the american life.\nThe american dream, household animals doing funny things, people going on their daily life,\nworking, having fun etc" ]
565999b1d248c945be291d19627342706a7456dc
# Dataset Card for Evaluation run of BEE-spoke-data/smol_llama-220M-openhermes <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [BEE-spoke-data/smol_llama-220M-openhermes](https://huggingface.co/BEE-spoke-data/smol_llama-220M-openhermes) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_BEE-spoke-data__smol_llama-220M-openhermes", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T11:14:24.402269](https://huggingface.co/datasets/open-llm-leaderboard/details_BEE-spoke-data__smol_llama-220M-openhermes/blob/main/results_2024-01-04T11-14-24.402269.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.26124152760742403, "acc_stderr": 0.030861921022096055, "acc_norm": 0.26204860049920253, "acc_norm_stderr": 0.03164810576117213, "mc1": 0.2460220318237454, "mc1_stderr": 0.015077219200662604, "mc2": 0.4308131121933828, "mc2_stderr": 0.015493567057293808 }, "harness|arc:challenge|25": { "acc": 0.2090443686006826, "acc_stderr": 0.011882746987406451, "acc_norm": 0.25170648464163825, "acc_norm_stderr": 0.012682496334042967 }, "harness|hellaswag|10": { "acc": 0.2811192989444334, "acc_stderr": 0.004486268470666339, "acc_norm": 0.28978291177056364, "acc_norm_stderr": 0.004527343651130807 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.17777777777777778, "acc_stderr": 0.033027898599017176, "acc_norm": 0.17777777777777778, "acc_norm_stderr": 0.033027898599017176 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.21710526315789475, "acc_stderr": 0.033550453048829226, "acc_norm": 0.21710526315789475, "acc_norm_stderr": 0.033550453048829226 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.22, "acc_stderr": 0.0416333199893227, "acc_norm": 0.22, "acc_norm_stderr": 0.0416333199893227 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.26037735849056604, "acc_stderr": 0.0270087660907081, "acc_norm": 0.26037735849056604, "acc_norm_stderr": 0.0270087660907081 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2708333333333333, "acc_stderr": 0.03716177437566016, "acc_norm": 0.2708333333333333, "acc_norm_stderr": 0.03716177437566016 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.047609522856952365, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.36, "acc_stderr": 
0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2832369942196532, "acc_stderr": 0.03435568056047875, "acc_norm": 0.2832369942196532, "acc_norm_stderr": 0.03435568056047875 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171452, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171452 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.18, "acc_stderr": 0.038612291966536955, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536955 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2425531914893617, "acc_stderr": 0.028020226271200217, "acc_norm": 0.2425531914893617, "acc_norm_stderr": 0.028020226271200217 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.04049339297748141, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.04049339297748141 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2689655172413793, "acc_stderr": 0.036951833116502325, "acc_norm": 0.2689655172413793, "acc_norm_stderr": 0.036951833116502325 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24603174603174602, "acc_stderr": 0.022182037202948368, "acc_norm": 0.24603174603174602, "acc_norm_stderr": 0.022182037202948368 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.16666666666666666, "acc_stderr": 0.03333333333333337, "acc_norm": 0.16666666666666666, "acc_norm_stderr": 0.03333333333333337 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.04560480215720683, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720683 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.3096774193548387, "acc_stderr": 0.026302774983517418, "acc_norm": 0.3096774193548387, "acc_norm_stderr": 0.026302774983517418 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2512315270935961, "acc_stderr": 0.030516530732694436, "acc_norm": 0.2512315270935961, "acc_norm_stderr": 0.030516530732694436 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.19, "acc_stderr": 0.039427724440366234, "acc_norm": 0.19, "acc_norm_stderr": 0.039427724440366234 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.3090909090909091, "acc_stderr": 0.036085410115739666, "acc_norm": 0.3090909090909091, "acc_norm_stderr": 0.036085410115739666 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.21212121212121213, "acc_stderr": 0.029126522834586815, "acc_norm": 0.21212121212121213, "acc_norm_stderr": 0.029126522834586815 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.36787564766839376, "acc_stderr": 0.034801756684660366, "acc_norm": 0.36787564766839376, "acc_norm_stderr": 0.034801756684660366 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.358974358974359, "acc_stderr": 0.024321738484602368, "acc_norm": 0.358974358974359, "acc_norm_stderr": 0.024321738484602368 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712163, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712163 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.28991596638655465, "acc_stderr": 0.029472485833136084, "acc_norm": 0.28991596638655465, "acc_norm_stderr": 
0.029472485833136084 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3174311926605505, "acc_stderr": 0.0199571521984605, "acc_norm": 0.3174311926605505, "acc_norm_stderr": 0.0199571521984605 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.3284313725490196, "acc_stderr": 0.032962451101722294, "acc_norm": 0.3284313725490196, "acc_norm_stderr": 0.032962451101722294 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.24050632911392406, "acc_stderr": 0.027820781981149675, "acc_norm": 0.24050632911392406, "acc_norm_stderr": 0.027820781981149675 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.2645739910313901, "acc_stderr": 0.0296051032170383, "acc_norm": 0.2645739910313901, "acc_norm_stderr": 0.0296051032170383 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.2900763358778626, "acc_stderr": 0.03980066246467765, "acc_norm": 0.2900763358778626, "acc_norm_stderr": 0.03980066246467765 }, "harness|hendrycksTest-international_law|5": { "acc": 0.23140495867768596, "acc_stderr": 0.03849856098794088, "acc_norm": 0.23140495867768596, "acc_norm_stderr": 0.03849856098794088 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.21296296296296297, "acc_stderr": 0.0395783547198098, "acc_norm": 0.21296296296296297, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22699386503067484, "acc_stderr": 0.03291099578615769, "acc_norm": 0.22699386503067484, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.17857142857142858, "acc_stderr": 0.036352091215778065, "acc_norm": 0.17857142857142858, "acc_norm_stderr": 0.036352091215778065 }, "harness|hendrycksTest-management|5": { "acc": 0.18446601941747573, "acc_stderr": 0.03840423627288276, "acc_norm": 0.18446601941747573, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.20085470085470086, "acc_stderr": 0.02624677294689048, "acc_norm": 0.20085470085470086, "acc_norm_stderr": 0.02624677294689048 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2541507024265645, "acc_stderr": 0.015569254692045778, "acc_norm": 0.2541507024265645, "acc_norm_stderr": 0.015569254692045778 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.20809248554913296, "acc_stderr": 0.021855255263421802, "acc_norm": 0.20809248554913296, "acc_norm_stderr": 0.021855255263421802 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.24509803921568626, "acc_stderr": 0.024630048979824775, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.024630048979824775 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2090032154340836, "acc_stderr": 0.02309314039837422, "acc_norm": 0.2090032154340836, "acc_norm_stderr": 0.02309314039837422 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.19444444444444445, "acc_stderr": 
0.022021366100220194, "acc_norm": 0.19444444444444445, "acc_norm_stderr": 0.022021366100220194 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2765957446808511, "acc_stderr": 0.026684564340461004, "acc_norm": 0.2765957446808511, "acc_norm_stderr": 0.026684564340461004 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24837027379400262, "acc_stderr": 0.011035212598034496, "acc_norm": 0.24837027379400262, "acc_norm_stderr": 0.011035212598034496 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4485294117647059, "acc_stderr": 0.030211479609121593, "acc_norm": 0.4485294117647059, "acc_norm_stderr": 0.030211479609121593 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2222222222222222, "acc_stderr": 0.016819028375736386, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.016819028375736386 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2636363636363636, "acc_stderr": 0.04220224692971987, "acc_norm": 0.2636363636363636, "acc_norm_stderr": 0.04220224692971987 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.33877551020408164, "acc_stderr": 0.030299506562154185, "acc_norm": 0.33877551020408164, "acc_norm_stderr": 0.030299506562154185 }, "harness|hendrycksTest-sociology|5": { "acc": 0.22388059701492538, "acc_stderr": 0.0294752502360172, "acc_norm": 0.22388059701492538, "acc_norm_stderr": 0.0294752502360172 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-virology|5": { "acc": 0.22289156626506024, "acc_stderr": 0.03240004825594689, "acc_norm": 0.22289156626506024, "acc_norm_stderr": 0.03240004825594689 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.26900584795321636, "acc_stderr": 0.0340105262010409, "acc_norm": 0.26900584795321636, "acc_norm_stderr": 0.0340105262010409 }, "harness|truthfulqa:mc|0": { "mc1": 0.2460220318237454, "mc1_stderr": 0.015077219200662604, "mc2": 0.4308131121933828, "mc2_stderr": 0.015493567057293808 }, "harness|winogrande|5": { "acc": 0.5201262825572218, "acc_stderr": 0.014041096664344332 }, "harness|gsm8k|5": { "acc": 0.006065200909780136, "acc_stderr": 0.002138670301460473 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
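As a usage sketch complementing the snippet quoted in the card above, the aggregated metrics and a single task's per-example details can also be pulled from the "results" and task-specific configurations; the config names ("results", "harness_arc_challenge_25") and the "latest" split come straight from this card's metadata, while the printed column names are simply whatever the stored parquet files expose.

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_BEE-spoke-data__smol_llama-220M-openhermes"

# Aggregated metrics of the most recent run (the "results" configuration).
results = load_dataset(REPO, "results", split="latest")
print(results.column_names)

# Per-example details for one task, e.g. the 25-shot ARC challenge config.
arc_details = load_dataset(REPO, "harness_arc_challenge_25", split="latest")
print(arc_details[0].keys())
```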
open-llm-leaderboard/details_BEE-spoke-data__smol_llama-220M-openhermes
[ "region:us" ]
2024-01-04T11:16:18+00:00
{"pretty_name": "Evaluation run of BEE-spoke-data/smol_llama-220M-openhermes", "dataset_summary": "Dataset automatically created during the evaluation run of model [BEE-spoke-data/smol_llama-220M-openhermes](https://huggingface.co/BEE-spoke-data/smol_llama-220M-openhermes) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_BEE-spoke-data__smol_llama-220M-openhermes\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T11:14:24.402269](https://huggingface.co/datasets/open-llm-leaderboard/details_BEE-spoke-data__smol_llama-220M-openhermes/blob/main/results_2024-01-04T11-14-24.402269.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.26124152760742403,\n \"acc_stderr\": 0.030861921022096055,\n \"acc_norm\": 0.26204860049920253,\n \"acc_norm_stderr\": 0.03164810576117213,\n \"mc1\": 0.2460220318237454,\n \"mc1_stderr\": 0.015077219200662604,\n \"mc2\": 0.4308131121933828,\n \"mc2_stderr\": 0.015493567057293808\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.2090443686006826,\n \"acc_stderr\": 0.011882746987406451,\n \"acc_norm\": 0.25170648464163825,\n \"acc_norm_stderr\": 0.012682496334042967\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2811192989444334,\n \"acc_stderr\": 0.004486268470666339,\n \"acc_norm\": 0.28978291177056364,\n \"acc_norm_stderr\": 0.004527343651130807\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.17777777777777778,\n \"acc_stderr\": 0.033027898599017176,\n \"acc_norm\": 0.17777777777777778,\n \"acc_norm_stderr\": 0.033027898599017176\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.21710526315789475,\n \"acc_stderr\": 0.033550453048829226,\n \"acc_norm\": 0.21710526315789475,\n \"acc_norm_stderr\": 0.033550453048829226\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.0416333199893227,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.0416333199893227\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.26037735849056604,\n \"acc_stderr\": 0.0270087660907081,\n \"acc_norm\": 0.26037735849056604,\n \"acc_norm_stderr\": 0.0270087660907081\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2708333333333333,\n \"acc_stderr\": 0.03716177437566016,\n \"acc_norm\": 0.2708333333333333,\n \"acc_norm_stderr\": 0.03716177437566016\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952365,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952365\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2832369942196532,\n \"acc_stderr\": 0.03435568056047875,\n \"acc_norm\": 0.2832369942196532,\n \"acc_norm_stderr\": 0.03435568056047875\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.04220773659171452,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.04220773659171452\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536955,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536955\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2425531914893617,\n \"acc_stderr\": 0.028020226271200217,\n \"acc_norm\": 0.2425531914893617,\n \"acc_norm_stderr\": 0.028020226271200217\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.24561403508771928,\n \"acc_stderr\": 0.04049339297748141,\n \"acc_norm\": 0.24561403508771928,\n \"acc_norm_stderr\": 0.04049339297748141\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2689655172413793,\n \"acc_stderr\": 0.036951833116502325,\n \"acc_norm\": 0.2689655172413793,\n \"acc_norm_stderr\": 0.036951833116502325\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.24603174603174602,\n \"acc_stderr\": 0.022182037202948368,\n \"acc_norm\": 0.24603174603174602,\n \"acc_norm_stderr\": 0.022182037202948368\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.16666666666666666,\n \"acc_stderr\": 0.03333333333333337,\n \"acc_norm\": 0.16666666666666666,\n \"acc_norm_stderr\": 0.03333333333333337\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720683,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720683\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.3096774193548387,\n \"acc_stderr\": 0.026302774983517418,\n \"acc_norm\": 0.3096774193548387,\n \"acc_norm_stderr\": 0.026302774983517418\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2512315270935961,\n \"acc_stderr\": 0.030516530732694436,\n \"acc_norm\": 0.2512315270935961,\n \"acc_norm_stderr\": 0.030516530732694436\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.19,\n \"acc_stderr\": 0.039427724440366234,\n \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.039427724440366234\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.3090909090909091,\n \"acc_stderr\": 0.036085410115739666,\n \"acc_norm\": 0.3090909090909091,\n \"acc_norm_stderr\": 0.036085410115739666\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.21212121212121213,\n \"acc_stderr\": 0.029126522834586815,\n \"acc_norm\": 0.21212121212121213,\n \"acc_norm_stderr\": 0.029126522834586815\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.36787564766839376,\n \"acc_stderr\": 0.034801756684660366,\n \"acc_norm\": 
0.36787564766839376,\n \"acc_norm_stderr\": 0.034801756684660366\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.358974358974359,\n \"acc_stderr\": 0.024321738484602368,\n \"acc_norm\": 0.358974358974359,\n \"acc_norm_stderr\": 0.024321738484602368\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.026719240783712163,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.026719240783712163\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.28991596638655465,\n \"acc_stderr\": 0.029472485833136084,\n \"acc_norm\": 0.28991596638655465,\n \"acc_norm_stderr\": 0.029472485833136084\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.3174311926605505,\n \"acc_stderr\": 0.0199571521984605,\n \"acc_norm\": 0.3174311926605505,\n \"acc_norm_stderr\": 0.0199571521984605\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.3284313725490196,\n \"acc_stderr\": 0.032962451101722294,\n \"acc_norm\": 0.3284313725490196,\n \"acc_norm_stderr\": 0.032962451101722294\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.24050632911392406,\n \"acc_stderr\": 0.027820781981149675,\n \"acc_norm\": 0.24050632911392406,\n \"acc_norm_stderr\": 0.027820781981149675\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.2645739910313901,\n \"acc_stderr\": 0.0296051032170383,\n \"acc_norm\": 0.2645739910313901,\n \"acc_norm_stderr\": 0.0296051032170383\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.2900763358778626,\n \"acc_stderr\": 0.03980066246467765,\n \"acc_norm\": 0.2900763358778626,\n \"acc_norm_stderr\": 0.03980066246467765\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.23140495867768596,\n \"acc_stderr\": 0.03849856098794088,\n \"acc_norm\": 0.23140495867768596,\n \"acc_norm_stderr\": 0.03849856098794088\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.21296296296296297,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.21296296296296297,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22699386503067484,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.22699386503067484,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.17857142857142858,\n \"acc_stderr\": 0.036352091215778065,\n \"acc_norm\": 0.17857142857142858,\n \"acc_norm_stderr\": 0.036352091215778065\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.18446601941747573,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.18446601941747573,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.20085470085470086,\n \"acc_stderr\": 0.02624677294689048,\n \"acc_norm\": 0.20085470085470086,\n \"acc_norm_stderr\": 0.02624677294689048\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 
0.04351941398892446\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2541507024265645,\n \"acc_stderr\": 0.015569254692045778,\n \"acc_norm\": 0.2541507024265645,\n \"acc_norm_stderr\": 0.015569254692045778\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.20809248554913296,\n \"acc_stderr\": 0.021855255263421802,\n \"acc_norm\": 0.20809248554913296,\n \"acc_norm_stderr\": 0.021855255263421802\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.024630048979824775,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.024630048979824775\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2090032154340836,\n \"acc_stderr\": 0.02309314039837422,\n \"acc_norm\": 0.2090032154340836,\n \"acc_norm_stderr\": 0.02309314039837422\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.19444444444444445,\n \"acc_stderr\": 0.022021366100220194,\n \"acc_norm\": 0.19444444444444445,\n \"acc_norm_stderr\": 0.022021366100220194\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2765957446808511,\n \"acc_stderr\": 0.026684564340461004,\n \"acc_norm\": 0.2765957446808511,\n \"acc_norm_stderr\": 0.026684564340461004\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24837027379400262,\n \"acc_stderr\": 0.011035212598034496,\n \"acc_norm\": 0.24837027379400262,\n \"acc_norm_stderr\": 0.011035212598034496\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4485294117647059,\n \"acc_stderr\": 0.030211479609121593,\n \"acc_norm\": 0.4485294117647059,\n \"acc_norm_stderr\": 0.030211479609121593\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.016819028375736386,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.016819028375736386\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2636363636363636,\n \"acc_stderr\": 0.04220224692971987,\n \"acc_norm\": 0.2636363636363636,\n \"acc_norm_stderr\": 0.04220224692971987\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.33877551020408164,\n \"acc_stderr\": 0.030299506562154185,\n \"acc_norm\": 0.33877551020408164,\n \"acc_norm_stderr\": 0.030299506562154185\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.22388059701492538,\n \"acc_stderr\": 0.0294752502360172,\n \"acc_norm\": 0.22388059701492538,\n \"acc_norm_stderr\": 0.0294752502360172\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.22289156626506024,\n \"acc_stderr\": 0.03240004825594689,\n \"acc_norm\": 0.22289156626506024,\n \"acc_norm_stderr\": 0.03240004825594689\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.26900584795321636,\n \"acc_stderr\": 0.0340105262010409,\n \"acc_norm\": 0.26900584795321636,\n \"acc_norm_stderr\": 0.0340105262010409\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2460220318237454,\n \"mc1_stderr\": 0.015077219200662604,\n \"mc2\": 0.4308131121933828,\n \"mc2_stderr\": 0.015493567057293808\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5201262825572218,\n \"acc_stderr\": 0.014041096664344332\n 
},\n \"harness|gsm8k|5\": {\n \"acc\": 0.006065200909780136,\n \"acc_stderr\": 0.002138670301460473\n }\n}\n```", "repo_url": "https://huggingface.co/BEE-spoke-data/smol_llama-220M-openhermes", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|arc:challenge|25_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|gsm8k|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hellaswag|10_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-14-24.402269.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-14-24.402269.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-14-24.402269.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T11-14-24.402269.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-14-24.402269.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["**/details_harness|winogrande|5_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-04T11-14-24.402269.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T11_14_24.402269", "path": ["results_2024-01-04T11-14-24.402269.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T11-14-24.402269.parquet"]}]}]}
2024-01-04T11:17:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of BEE-spoke-data/smol_llama-220M-openhermes Dataset automatically created during the evaluation run of model BEE-spoke-data/smol_llama-220M-openhermes on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T11:14:24.402269 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
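The loading snippet that "To load the details from a run, you can for instance do the following:" refers to is the one given in the card itself:

```python
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_BEE-spoke-data__smol_llama-220M-openhermes",
    "harness_winogrande_5",
    split="train",
)
```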
[ "# Dataset Card for Evaluation run of BEE-spoke-data/smol_llama-220M-openhermes\n\n\n\nDataset automatically created during the evaluation run of model BEE-spoke-data/smol_llama-220M-openhermes on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T11:14:24.402269(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of BEE-spoke-data/smol_llama-220M-openhermes\n\n\n\nDataset automatically created during the evaluation run of model BEE-spoke-data/smol_llama-220M-openhermes on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T11:14:24.402269(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 201, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of BEE-spoke-data/smol_llama-220M-openhermes\n\n\n\nDataset automatically created during the evaluation run of model BEE-spoke-data/smol_llama-220M-openhermes on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T11:14:24.402269(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
7032e4a5e2dee539b3d5567239fc6ede127d949f
### Key aspects * Event extraction; * [Multi-label classification](https://en.wikipedia.org/wiki/Multi-label_classification); * Biotech news domain; * 31 classes; * 3140 examples in total; ### Motivation Text classification is a widespread task and a foundational step in numerous information extraction pipelines. However, a notable challenge in current NLP research lies in the oversimplification of benchmarking datasets, which predominantly focus on rudimentary tasks such as topic classification or sentiment analysis. This dataset is specifically curated to address the limitations of existing benchmarks by incorporating rich and complex content derived from the biotech news domain. It encompasses diverse biotech news articles consisting of various events, offering a more nuanced perspective on information extraction challenges. A distinctive feature of this dataset is its emphasis on not only identifying the overarching theme but also extracting information about the target companies associated with the news. This dual-layered approach enhances the dataset's utility for applications that require a deeper understanding of the relationships between events, companies, and the biotech industry as a whole. ### Classes The dataset consists of **31** classes, including None values. * event organization - organizing or participating in an event like a conference, exhibition, etc. * executive statement - a statement or quote from an executive of a company. * regulatory approval - getting approval from regulatory bodies for products, services, trials, etc. * hiring - announcing new hires or appointments at the company. * foundation - establishing a new charitable foundation. * closing - shutting down a facility/office/division or ceasing an initiative. * partnerships & alliances - forming partnerships or strategic alliances with other companies. * expanding industry - expanding into new industries or markets. * new initiatives or programs - announcing new initiatives, programs, or campaigns. * m&a - mergers, acquisitions, or divestitures. * None - no label. * service & product providing - launching or expanding products or services. * event organisation - organizing or participating in an event. * new initiatives & programs - announcing new initiatives or programs. * subsidiary establishment - establishing a new subsidiary company. * product launching & presentation - launching or unveiling a new product. * product updates - announcing updates or new versions of existing products. * executive appointment - appointing a new executive. * alliance & partnership - forming an alliance or partnership. * ipo exit - having an initial public offering or acquisition exit. * article publication - publishing an article. * clinical trial sponsorship - sponsoring or participating in a clinical trial. * company description - describing or profiling the company. * investment in public company - making an investment in a public company. * other - other events that don't fit into defined categories. * expanding geography - expanding into new geographical areas. * participation in an event - participating in an industry event, conference, etc. * support & philanthropy - philanthropic activities or donations. * department establishment - establishing a new department or division. * funding round - raising a new round of funding. * patent publication - publication of a new patent filing. ### Benchmark We trained various models with binary cross-entropy loss and evaluated them on the test set.
| Model           | Accuracy | F1    | Precision | Recall |
|-----------------|----------|-------|-----------|--------|
| DeBERTa-small   | 96.58    | 67.69 | 74.18     | 62.19  |
| DeBERTa-base    | 96.60    | 67.55 | 74.81     | 61.58  |
| DeBERTa-large   | 96.99    | 74.07 | 73.46     | 74.69  |
| SciBERT-uncased | 96.57    | 68.07 | 73.07     | 63.71  |
| Flan-T5-base    | 96.85    | 71.10 | 75.71     | 67.07  |

### Recommended reading: - Check the general overview of the dataset on Medium - [Finally, a decent multi-label classification benchmark is created: a prominent zero-shot dataset.](https://medium.com/p/4d90c9e1c718) - Try to train your own model on the dataset - [Multi-Label Classification Model From Scratch: Step-by-Step Tutorial](https://huggingface.co/blog/Valerii-Knowledgator/multi-label-classification) ### Feedback We value your input! Share your feedback and suggestions to help us improve our models and datasets. Fill out the feedback [form](https://forms.gle/5CPFFuLzNWznjcpL7) ### Join Our Discord Connect with our community on Discord for news, support, and discussion about our models and datasets. Join [Discord](https://discord.gg/mfZfwjpB)
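A minimal sketch of loading the dataset and building multi-hot targets for the binary cross-entropy setup described in the Benchmark section; the "all_labels" column name is an assumption, so check the printed features and adjust accordingly:

```python
from datasets import load_dataset
from sklearn.preprocessing import MultiLabelBinarizer

# Load the dataset from the Hugging Face Hub
ds = load_dataset("knowledgator/events_classification_biotech")
print(ds)                    # available splits and their sizes
print(ds["train"].features)  # inspect the actual column names

# Build multi-hot targets for a binary cross-entropy (multi-label) objective.
# "all_labels" is an assumed column name for the per-article list of event
# labels; adjust it to match the features printed above.
labels = [[l for l in ex["all_labels"] if l] for ex in ds["train"]]
mlb = MultiLabelBinarizer()
targets = mlb.fit_transform(labels)
print(targets.shape, list(mlb.classes_)[:5])
```

The resulting multi-hot vectors can then be paired with any of the encoder models from the benchmark table, using a sigmoid output layer and binary cross-entropy loss.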
knowledgator/events_classification_biotech
[ "task_categories:text-classification", "task_categories:zero-shot-classification", "task_categories:question-answering", "size_categories:n<1K", "language:en", "license:odc-by", "text classification", "biotech", "news", "information-extraction", "multi-label", "region:us" ]
2024-01-04T11:27:02+00:00
{"language": ["en"], "license": "odc-by", "size_categories": ["n<1K"], "task_categories": ["text-classification", "zero-shot-classification", "question-answering"], "pretty_name": "biotechnews", "tags": ["text classification", "biotech", "news", "information-extraction", "multi-label"]}
2024-01-16T08:19:58+00:00
[]
[ "en" ]
TAGS #task_categories-text-classification #task_categories-zero-shot-classification #task_categories-question-answering #size_categories-n<1K #language-English #license-odc-by #text classification #biotech #news #information-extraction #multi-label #region-us
### Key aspects * Event extraction; * Multi-label classification; * Biotech news domain; * 31 classes; * 3140 total number of examples; ### Motivation Text classification is a widespread task and a foundational step in numerous information extraction pipelines. However, a notable challenge in current NLP research lies in the oversimplification of benchmarking datasets, which predominantly focus on rudimentary tasks such as topic classification or sentiment analysis. This dataset is specifically curated to address the limitations of existing benchmarks by incorporating rich and complex content derived from the biotech news domain. It encompasses diverse biotech news articles consisting of various events, offering a more nuanced perspective on information extraction challenges. A distinctive feature of this dataset is its emphasis on not only identifying the overarching theme but also extracting information about the target companies associated with the news. This dual-layered approach enhances the dataset's utility for applications that require a deeper understanding of the relationships between events, companies, and the biotech industry as a whole. ### Classes The dataset consists of 31 classes, including None values. * event organization - organizing or participating in an event like a conference, exhibition, etc. * executive statement - a statement or quote from an executive of a company. * regulatory approval - getting approval from regulatory bodies for products, services, trials, etc. * hiring - announcing new hires or appointments at the company. * foundation - establishing a new charitable foundation. * closing - shutting down a facility/office/division or ceasing an initiative. * partnerships & alliances - forming partnerships or strategic alliances with other companies. * expanding industry - expanding into new industries or markets. * new initiatives or programs - announcing new initiatives, programs, or campaigns. * m&a - mergers, acquisitions, or divestitures. * None - no label. * service & product providing - launching or expanding products or services. * event organisation - organizing or participating in an event. * new initiatives & programs - announcing new initiatives or programs. * subsidiary establishment - establishing a new subsidiary company. * product launching & presentation - launching or unveiling a new product. * product updates - announcing updates or new versions of existing products. * executive appointment - appointing a new executive. * alliance & partnership - forming an alliance or partnership. * ipo exit - having an initial public offering or acquisition exit. * article publication - publishing an article. * clinical trial sponsorship - Sponsoring or participating in a clinical trial. * company description - describing or profiling the company. * investment in public company - making an investment in a public company. * other - other events that don't fit into defined categories. * expanding geography - expanding into new geographical areas. * participation in an event - participating in an industry event, conference, etc. * support & philanthropy - philanthropic activities or donations. * department establishment - establishing a new department or division. * funding round - raising a new round of funding. * patent publication - publication of a new patent filing. ### Benchmark We trained various models with binary-cross entropy loss and evaluated them on the test set. 
### Recommended reading: * Check the general overview of the dataset on Medium - Finally, a decent multi-label classification benchmark is created: a prominent zero-shot dataset. * Try to train your own model on the dataset - Multi-Label Classification Model From Scratch: Step-by-Step Tutorial ### Feedback We value your input! Share your feedback and suggestions to help us improve our models and datasets. Fill out the feedback form ### Join Our Discord Connect with our community on Discord for news, support, and discussion about our models and datasets. Join Discord
[ "### Key aspects\n\n\n* Event extraction;\n* Multi-label classification;\n* Biotech news domain;\n* 31 classes;\n* 3140 total number of examples;", "### Motivation\n\n\nText classification is a widespread task and a foundational step in numerous information extraction pipelines. However, a notable challenge in current NLP research lies in the oversimplification of benchmarking datasets, which predominantly focus on rudimentary tasks such as topic classification or sentiment analysis.\n\n\nThis dataset is specifically curated to address the limitations of existing benchmarks by incorporating rich and complex content derived from the biotech news domain. It encompasses diverse biotech news articles consisting of various events, offering a more nuanced perspective on information extraction challenges.\n\n\nA distinctive feature of this dataset is its emphasis on not only identifying the overarching theme but also extracting information about the target companies associated with the news. This dual-layered approach enhances the dataset's utility for applications that require a deeper understanding of the relationships between events, companies, and the biotech industry as a whole.", "### Classes\n\n\nThe dataset consists of 31 classes, including None values.\n\n\n* event organization - organizing or participating in an event like a conference, exhibition, etc.\n* executive statement - a statement or quote from an executive of a company.\n* regulatory approval - getting approval from regulatory bodies for products, services, trials, etc.\n* hiring - announcing new hires or appointments at the company.\n* foundation - establishing a new charitable foundation.\n* closing - shutting down a facility/office/division or ceasing an initiative.\n* partnerships & alliances - forming partnerships or strategic alliances with other companies.\n* expanding industry - expanding into new industries or markets.\n* new initiatives or programs - announcing new initiatives, programs, or campaigns.\n* m&a - mergers, acquisitions, or divestitures.\n* None - no label.\n* service & product providing - launching or expanding products or services.\n* event organisation - organizing or participating in an event.\n* new initiatives & programs - announcing new initiatives or programs.\n* subsidiary establishment - establishing a new subsidiary company.\n* product launching & presentation - launching or unveiling a new product.\n* product updates - announcing updates or new versions of existing products.\n* executive appointment - appointing a new executive.\n* alliance & partnership - forming an alliance or partnership.\n* ipo exit - having an initial public offering or acquisition exit.\n* article publication - publishing an article.\n* clinical trial sponsorship - Sponsoring or participating in a clinical trial.\n* company description - describing or profiling the company.\n* investment in public company - making an investment in a public company.\n* other - other events that don't fit into defined categories.\n* expanding geography - expanding into new geographical areas.\n* participation in an event - participating in an industry event, conference, etc.\n* support & philanthropy - philanthropic activities or donations.\n* department establishment - establishing a new department or division.\n* funding round - raising a new round of funding.\n* patent publication - publication of a new patent filing.", "### Benchmark\n\n\nWe trained various models with binary-cross entropy loss and evaluated them on the test set.", 
"### Recommended reading:\n\n\n* Check the general overview of the dataset on Medium - Finally, a decent multi-label classification benchmark is created: a prominent zero-shot dataset.\n* Try to train your own model on the datset - Multi-Label Classification Model From Scratch: Step-by-Step Tutorial", "### Feedback\n\n\nWe value your input! Share your feedback and suggestions to help us improve our models and datasets.\nFill out the feedback form", "### Join Our Discord\n\n\nConnect with our community on Discord for news, support, and discussion about our models and datasets.\nJoin Discord" ]
[ "TAGS\n#task_categories-text-classification #task_categories-zero-shot-classification #task_categories-question-answering #size_categories-n<1K #language-English #license-odc-by #text classification #biotech #news #information-extraction #multi-label #region-us \n", "### Key aspects\n\n\n* Event extraction;\n* Multi-label classification;\n* Biotech news domain;\n* 31 classes;\n* 3140 total number of examples;", "### Motivation\n\n\nText classification is a widespread task and a foundational step in numerous information extraction pipelines. However, a notable challenge in current NLP research lies in the oversimplification of benchmarking datasets, which predominantly focus on rudimentary tasks such as topic classification or sentiment analysis.\n\n\nThis dataset is specifically curated to address the limitations of existing benchmarks by incorporating rich and complex content derived from the biotech news domain. It encompasses diverse biotech news articles consisting of various events, offering a more nuanced perspective on information extraction challenges.\n\n\nA distinctive feature of this dataset is its emphasis on not only identifying the overarching theme but also extracting information about the target companies associated with the news. This dual-layered approach enhances the dataset's utility for applications that require a deeper understanding of the relationships between events, companies, and the biotech industry as a whole.", "### Classes\n\n\nThe dataset consists of 31 classes, including None values.\n\n\n* event organization - organizing or participating in an event like a conference, exhibition, etc.\n* executive statement - a statement or quote from an executive of a company.\n* regulatory approval - getting approval from regulatory bodies for products, services, trials, etc.\n* hiring - announcing new hires or appointments at the company.\n* foundation - establishing a new charitable foundation.\n* closing - shutting down a facility/office/division or ceasing an initiative.\n* partnerships & alliances - forming partnerships or strategic alliances with other companies.\n* expanding industry - expanding into new industries or markets.\n* new initiatives or programs - announcing new initiatives, programs, or campaigns.\n* m&a - mergers, acquisitions, or divestitures.\n* None - no label.\n* service & product providing - launching or expanding products or services.\n* event organisation - organizing or participating in an event.\n* new initiatives & programs - announcing new initiatives or programs.\n* subsidiary establishment - establishing a new subsidiary company.\n* product launching & presentation - launching or unveiling a new product.\n* product updates - announcing updates or new versions of existing products.\n* executive appointment - appointing a new executive.\n* alliance & partnership - forming an alliance or partnership.\n* ipo exit - having an initial public offering or acquisition exit.\n* article publication - publishing an article.\n* clinical trial sponsorship - Sponsoring or participating in a clinical trial.\n* company description - describing or profiling the company.\n* investment in public company - making an investment in a public company.\n* other - other events that don't fit into defined categories.\n* expanding geography - expanding into new geographical areas.\n* participation in an event - participating in an industry event, conference, etc.\n* support & philanthropy - philanthropic activities or donations.\n* department establishment - 
establishing a new department or division.\n* funding round - raising a new round of funding.\n* patent publication - publication of a new patent filing.", "### Benchmark\n\n\nWe trained various models with binary-cross entropy loss and evaluated them on the test set.", "### Recommended reading:\n\n\n* Check the general overview of the dataset on Medium - Finally, a decent multi-label classification benchmark is created: a prominent zero-shot dataset.\n* Try to train your own model on the datset - Multi-Label Classification Model From Scratch: Step-by-Step Tutorial", "### Feedback\n\n\nWe value your input! Share your feedback and suggestions to help us improve our models and datasets.\nFill out the feedback form", "### Join Our Discord\n\n\nConnect with our community on Discord for news, support, and discussion about our models and datasets.\nJoin Discord" ]
[ 82, 35, 208, 493, 27, 70, 29, 31 ]
[ "passage: TAGS\n#task_categories-text-classification #task_categories-zero-shot-classification #task_categories-question-answering #size_categories-n<1K #language-English #license-odc-by #text classification #biotech #news #information-extraction #multi-label #region-us \n### Key aspects\n\n\n* Event extraction;\n* Multi-label classification;\n* Biotech news domain;\n* 31 classes;\n* 3140 total number of examples;### Motivation\n\n\nText classification is a widespread task and a foundational step in numerous information extraction pipelines. However, a notable challenge in current NLP research lies in the oversimplification of benchmarking datasets, which predominantly focus on rudimentary tasks such as topic classification or sentiment analysis.\n\n\nThis dataset is specifically curated to address the limitations of existing benchmarks by incorporating rich and complex content derived from the biotech news domain. It encompasses diverse biotech news articles consisting of various events, offering a more nuanced perspective on information extraction challenges.\n\n\nA distinctive feature of this dataset is its emphasis on not only identifying the overarching theme but also extracting information about the target companies associated with the news. This dual-layered approach enhances the dataset's utility for applications that require a deeper understanding of the relationships between events, companies, and the biotech industry as a whole." ]
e4c17d67e9c73f53fc9b66a6a4c3bab7ce188f9d
Data from [https://wenda.12371.cn/liebiao.php](https://wenda.12371.cn/liebiao.php)
crazysteeaam/Party_Affairs_Response
[ "task_categories:text-generation", "size_categories:100K<n<1M", "language:zh", "legal", "region:us" ]
2024-01-04T11:35:16+00:00
{"language": ["zh"], "size_categories": ["100K<n<1M"], "task_categories": ["text-generation"], "tags": ["legal"]}
2024-01-05T06:32:45+00:00
[]
[ "zh" ]
TAGS #task_categories-text-generation #size_categories-100K<n<1M #language-Chinese #legal #region-us
Data from URL
[]
[ "TAGS\n#task_categories-text-generation #size_categories-100K<n<1M #language-Chinese #legal #region-us \n" ]
[ 36 ]
[ "passage: TAGS\n#task_categories-text-generation #size_categories-100K<n<1M #language-Chinese #legal #region-us \n" ]
bd58e60f5fadcedde873390098c2528ce23e3eed
# Dataset Card for Evaluation run of TinyLlama/TinyLlama-1.1B-Chat-v1.0 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [TinyLlama/TinyLlama-1.1B-Chat-v1.0](https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TinyLlama__TinyLlama-1.1B-Chat-v1.0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T11:44:55.514182](https://huggingface.co/datasets/open-llm-leaderboard/details_TinyLlama__TinyLlama-1.1B-Chat-v1.0/blob/main/results_2024-01-04T11-44-55.514182.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2609421720124211, "acc_stderr": 0.03091039790056125, "acc_norm": 0.26176871498253385, "acc_norm_stderr": 0.0316552369448013, "mc1": 0.23378212974296206, "mc1_stderr": 0.014816195991931586, "mc2": 0.37475758071242915, "mc2_stderr": 0.013911882093015021 }, "harness|arc:challenge|25": { "acc": 0.34982935153583616, "acc_stderr": 0.01393680921215828, "acc_norm": 0.3609215017064846, "acc_norm_stderr": 0.01403476138617546 }, "harness|hellaswag|10": { "acc": 0.4592710615415256, "acc_stderr": 0.00497319929633997, "acc_norm": 0.6110336586337383, "acc_norm_stderr": 0.004865193237024058 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.23, "acc_stderr": 0.04229525846816505, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.17037037037037037, "acc_stderr": 0.032477811859955935, "acc_norm": 0.17037037037037037, "acc_norm_stderr": 0.032477811859955935 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.17763157894736842, "acc_stderr": 0.031103182383123387, "acc_norm": 0.17763157894736842, "acc_norm_stderr": 0.031103182383123387 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.27547169811320754, "acc_stderr": 0.02749566368372406, "acc_norm": 0.27547169811320754, "acc_norm_stderr": 0.02749566368372406 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2361111111111111, "acc_stderr": 0.03551446610810826, "acc_norm": 0.2361111111111111, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, 
"acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.1907514450867052, "acc_stderr": 0.02995785132986934, "acc_norm": 0.1907514450867052, "acc_norm_stderr": 0.02995785132986934 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179961, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179961 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2723404255319149, "acc_stderr": 0.029101290698386708, "acc_norm": 0.2723404255319149, "acc_norm_stderr": 0.029101290698386708 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.23684210526315788, "acc_stderr": 0.039994238792813344, "acc_norm": 0.23684210526315788, "acc_norm_stderr": 0.039994238792813344 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.23448275862068965, "acc_stderr": 0.035306258743465914, "acc_norm": 0.23448275862068965, "acc_norm_stderr": 0.035306258743465914 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2857142857142857, "acc_stderr": 0.023266512213730575, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.023266512213730575 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.23015873015873015, "acc_stderr": 0.03764950879790606, "acc_norm": 0.23015873015873015, "acc_norm_stderr": 0.03764950879790606 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.24838709677419354, "acc_stderr": 0.024580028921481006, "acc_norm": 0.24838709677419354, "acc_norm_stderr": 0.024580028921481006 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2512315270935961, "acc_stderr": 0.030516530732694433, "acc_norm": 0.2512315270935961, "acc_norm_stderr": 0.030516530732694433 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.24, "acc_stderr": 0.04292346959909282, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909282 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.24848484848484848, "acc_stderr": 0.03374402644139405, "acc_norm": 0.24848484848484848, "acc_norm_stderr": 0.03374402644139405 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.22727272727272727, "acc_stderr": 0.029857515673386407, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.029857515673386407 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.22279792746113988, "acc_stderr": 0.03003114797764154, "acc_norm": 0.22279792746113988, "acc_norm_stderr": 0.03003114797764154 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2717948717948718, "acc_stderr": 0.022556551010132354, "acc_norm": 0.2717948717948718, "acc_norm_stderr": 0.022556551010132354 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.026719240783712177, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.026719240783712177 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.24369747899159663, "acc_stderr": 0.027886828078380544, "acc_norm": 0.24369747899159663, "acc_norm_stderr": 0.027886828078380544 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.2052980132450331, "acc_stderr": 0.03297986648473836, "acc_norm": 0.2052980132450331, "acc_norm_stderr": 0.03297986648473836 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.23853211009174313, "acc_stderr": 0.01827257581023187, "acc_norm": 0.23853211009174313, "acc_norm_stderr": 0.01827257581023187 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4166666666666667, "acc_stderr": 0.03362277436608043, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.03362277436608043 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2320675105485232, "acc_stderr": 0.02747974455080851, "acc_norm": 0.2320675105485232, "acc_norm_stderr": 0.02747974455080851 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.35874439461883406, "acc_stderr": 0.032190792004199956, "acc_norm": 0.35874439461883406, "acc_norm_stderr": 0.032190792004199956 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.24427480916030533, "acc_stderr": 0.03768335959728745, "acc_norm": 0.24427480916030533, "acc_norm_stderr": 0.03768335959728745 }, "harness|hendrycksTest-international_law|5": { "acc": 0.256198347107438, "acc_stderr": 0.03984979653302871, "acc_norm": 0.256198347107438, "acc_norm_stderr": 0.03984979653302871 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.23148148148148148, "acc_stderr": 0.04077494709252626, "acc_norm": 0.23148148148148148, "acc_norm_stderr": 0.04077494709252626 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22699386503067484, "acc_stderr": 0.032910995786157686, "acc_norm": 0.22699386503067484, "acc_norm_stderr": 0.032910995786157686 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.29464285714285715, "acc_stderr": 0.04327040932578728, "acc_norm": 0.29464285714285715, "acc_norm_stderr": 0.04327040932578728 }, "harness|hendrycksTest-management|5": { "acc": 0.2524271844660194, "acc_stderr": 0.04301250399690875, "acc_norm": 0.2524271844660194, "acc_norm_stderr": 0.04301250399690875 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2777777777777778, "acc_stderr": 0.02934311479809448, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.02934311479809448 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2822477650063857, "acc_stderr": 0.01609530296987856, "acc_norm": 0.2822477650063857, "acc_norm_stderr": 0.01609530296987856 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.23121387283236994, "acc_stderr": 0.022698657167855716, "acc_norm": 0.23121387283236994, "acc_norm_stderr": 0.022698657167855716 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808835, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808835 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.24509803921568626, "acc_stderr": 0.024630048979824765, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.024630048979824765 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.26688102893890675, "acc_stderr": 0.025122637608816646, "acc_norm": 0.26688102893890675, "acc_norm_stderr": 0.025122637608816646 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.25617283950617287, "acc_stderr": 0.0242885336377261, "acc_norm": 
0.25617283950617287, "acc_norm_stderr": 0.0242885336377261 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.24822695035460993, "acc_stderr": 0.0257700156442904, "acc_norm": 0.24822695035460993, "acc_norm_stderr": 0.0257700156442904 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2379400260756193, "acc_stderr": 0.01087570078769424, "acc_norm": 0.2379400260756193, "acc_norm_stderr": 0.01087570078769424 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.2536764705882353, "acc_stderr": 0.026431329870789524, "acc_norm": 0.2536764705882353, "acc_norm_stderr": 0.026431329870789524 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2679738562091503, "acc_stderr": 0.017917974069594722, "acc_norm": 0.2679738562091503, "acc_norm_stderr": 0.017917974069594722 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.3, "acc_stderr": 0.04389311454644286, "acc_norm": 0.3, "acc_norm_stderr": 0.04389311454644286 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.14285714285714285, "acc_stderr": 0.022401787435256386, "acc_norm": 0.14285714285714285, "acc_norm_stderr": 0.022401787435256386 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24378109452736318, "acc_stderr": 0.030360490154014645, "acc_norm": 0.24378109452736318, "acc_norm_stderr": 0.030360490154014645 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-virology|5": { "acc": 0.3313253012048193, "acc_stderr": 0.03664314777288087, "acc_norm": 0.3313253012048193, "acc_norm_stderr": 0.03664314777288087 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.30409356725146197, "acc_stderr": 0.03528211258245231, "acc_norm": 0.30409356725146197, "acc_norm_stderr": 0.03528211258245231 }, "harness|truthfulqa:mc|0": { "mc1": 0.23378212974296206, "mc1_stderr": 0.014816195991931586, "mc2": 0.37475758071242915, "mc2_stderr": 0.013911882093015021 }, "harness|winogrande|5": { "acc": 0.6124704025256511, "acc_stderr": 0.013692354636016766 }, "harness|gsm8k|5": { "acc": 0.02350265352539803, "acc_stderr": 0.004172883669643949 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
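The aggregated metrics shown above can also be pulled programmatically from the "results" configuration; the "latest" split always points to the most recent run. A minimal sketch:

```python
from datasets import load_dataset

# Load the aggregated results of the most recent evaluation run; the "latest"
# split always points to the newest results, as described in the card.
results = load_dataset(
    "open-llm-leaderboard/details_TinyLlama__TinyLlama-1.1B-Chat-v1.0",
    "results",
    split="latest",
)
print(results)
print(results[0])  # flattened aggregated metrics; inspect the keys before use
```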
open-llm-leaderboard/details_TinyLlama__TinyLlama-1.1B-Chat-v1.0
[ "region:us" ]
2024-01-04T11:40:49+00:00
{"pretty_name": "Evaluation run of TinyLlama/TinyLlama-1.1B-Chat-v1.0", "dataset_summary": "Dataset automatically created during the evaluation run of model [TinyLlama/TinyLlama-1.1B-Chat-v1.0](https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TinyLlama__TinyLlama-1.1B-Chat-v1.0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T11:44:55.514182](https://huggingface.co/datasets/open-llm-leaderboard/details_TinyLlama__TinyLlama-1.1B-Chat-v1.0/blob/main/results_2024-01-04T11-44-55.514182.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2609421720124211,\n \"acc_stderr\": 0.03091039790056125,\n \"acc_norm\": 0.26176871498253385,\n \"acc_norm_stderr\": 0.0316552369448013,\n \"mc1\": 0.23378212974296206,\n \"mc1_stderr\": 0.014816195991931586,\n \"mc2\": 0.37475758071242915,\n \"mc2_stderr\": 0.013911882093015021\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.34982935153583616,\n \"acc_stderr\": 0.01393680921215828,\n \"acc_norm\": 0.3609215017064846,\n \"acc_norm_stderr\": 0.01403476138617546\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4592710615415256,\n \"acc_stderr\": 0.00497319929633997,\n \"acc_norm\": 0.6110336586337383,\n \"acc_norm_stderr\": 0.004865193237024058\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.17037037037037037,\n \"acc_stderr\": 0.032477811859955935,\n \"acc_norm\": 0.17037037037037037,\n \"acc_norm_stderr\": 0.032477811859955935\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.17763157894736842,\n \"acc_stderr\": 0.031103182383123387,\n \"acc_norm\": 0.17763157894736842,\n \"acc_norm_stderr\": 0.031103182383123387\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.27547169811320754,\n \"acc_stderr\": 0.02749566368372406,\n \"acc_norm\": 0.27547169811320754,\n \"acc_norm_stderr\": 0.02749566368372406\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2361111111111111,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.2361111111111111,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.1907514450867052,\n \"acc_stderr\": 0.02995785132986934,\n \"acc_norm\": 0.1907514450867052,\n \"acc_norm_stderr\": 0.02995785132986934\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179961,\n \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179961\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2723404255319149,\n \"acc_stderr\": 0.029101290698386708,\n \"acc_norm\": 0.2723404255319149,\n \"acc_norm_stderr\": 0.029101290698386708\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.23684210526315788,\n \"acc_stderr\": 0.039994238792813344,\n \"acc_norm\": 0.23684210526315788,\n \"acc_norm_stderr\": 0.039994238792813344\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.23448275862068965,\n \"acc_stderr\": 0.035306258743465914,\n \"acc_norm\": 0.23448275862068965,\n \"acc_norm_stderr\": 0.035306258743465914\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.023266512213730575,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.023266512213730575\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.23015873015873015,\n \"acc_stderr\": 0.03764950879790606,\n \"acc_norm\": 0.23015873015873015,\n \"acc_norm_stderr\": 0.03764950879790606\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.24838709677419354,\n \"acc_stderr\": 0.024580028921481006,\n \"acc_norm\": 0.24838709677419354,\n \"acc_norm_stderr\": 0.024580028921481006\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2512315270935961,\n \"acc_stderr\": 0.030516530732694433,\n \"acc_norm\": 0.2512315270935961,\n \"acc_norm_stderr\": 0.030516530732694433\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909282,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909282\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.24848484848484848,\n \"acc_stderr\": 0.03374402644139405,\n \"acc_norm\": 0.24848484848484848,\n \"acc_norm_stderr\": 0.03374402644139405\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.22727272727272727,\n \"acc_stderr\": 0.029857515673386407,\n \"acc_norm\": 0.22727272727272727,\n \"acc_norm_stderr\": 0.029857515673386407\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.22279792746113988,\n \"acc_stderr\": 0.03003114797764154,\n \"acc_norm\": 
0.22279792746113988,\n \"acc_norm_stderr\": 0.03003114797764154\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.2717948717948718,\n \"acc_stderr\": 0.022556551010132354,\n \"acc_norm\": 0.2717948717948718,\n \"acc_norm_stderr\": 0.022556551010132354\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.026719240783712177,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.026719240783712177\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.24369747899159663,\n \"acc_stderr\": 0.027886828078380544,\n \"acc_norm\": 0.24369747899159663,\n \"acc_norm_stderr\": 0.027886828078380544\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2052980132450331,\n \"acc_stderr\": 0.03297986648473836,\n \"acc_norm\": 0.2052980132450331,\n \"acc_norm_stderr\": 0.03297986648473836\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.23853211009174313,\n \"acc_stderr\": 0.01827257581023187,\n \"acc_norm\": 0.23853211009174313,\n \"acc_norm_stderr\": 0.01827257581023187\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4166666666666667,\n \"acc_stderr\": 0.03362277436608043,\n \"acc_norm\": 0.4166666666666667,\n \"acc_norm_stderr\": 0.03362277436608043\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.2320675105485232,\n \"acc_stderr\": 0.02747974455080851,\n \"acc_norm\": 0.2320675105485232,\n \"acc_norm_stderr\": 0.02747974455080851\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.35874439461883406,\n \"acc_stderr\": 0.032190792004199956,\n \"acc_norm\": 0.35874439461883406,\n \"acc_norm_stderr\": 0.032190792004199956\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.24427480916030533,\n \"acc_stderr\": 0.03768335959728745,\n \"acc_norm\": 0.24427480916030533,\n \"acc_norm_stderr\": 0.03768335959728745\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.256198347107438,\n \"acc_stderr\": 0.03984979653302871,\n \"acc_norm\": 0.256198347107438,\n \"acc_norm_stderr\": 0.03984979653302871\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.23148148148148148,\n \"acc_stderr\": 0.04077494709252626,\n \"acc_norm\": 0.23148148148148148,\n \"acc_norm_stderr\": 0.04077494709252626\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22699386503067484,\n \"acc_stderr\": 0.032910995786157686,\n \"acc_norm\": 0.22699386503067484,\n \"acc_norm_stderr\": 0.032910995786157686\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.29464285714285715,\n \"acc_stderr\": 0.04327040932578728,\n \"acc_norm\": 0.29464285714285715,\n \"acc_norm_stderr\": 0.04327040932578728\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.2524271844660194,\n \"acc_stderr\": 0.04301250399690875,\n \"acc_norm\": 0.2524271844660194,\n \"acc_norm_stderr\": 0.04301250399690875\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.02934311479809448,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.02934311479809448\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384741\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2822477650063857,\n \"acc_stderr\": 0.01609530296987856,\n \"acc_norm\": 0.2822477650063857,\n \"acc_norm_stderr\": 0.01609530296987856\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.23121387283236994,\n \"acc_stderr\": 0.022698657167855716,\n \"acc_norm\": 0.23121387283236994,\n \"acc_norm_stderr\": 0.022698657167855716\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24692737430167597,\n \"acc_stderr\": 0.014422292204808835,\n \"acc_norm\": 0.24692737430167597,\n \"acc_norm_stderr\": 0.014422292204808835\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.024630048979824765,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.024630048979824765\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.26688102893890675,\n \"acc_stderr\": 0.025122637608816646,\n \"acc_norm\": 0.26688102893890675,\n \"acc_norm_stderr\": 0.025122637608816646\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.25617283950617287,\n \"acc_stderr\": 0.0242885336377261,\n \"acc_norm\": 0.25617283950617287,\n \"acc_norm_stderr\": 0.0242885336377261\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.24822695035460993,\n \"acc_stderr\": 0.0257700156442904,\n \"acc_norm\": 0.24822695035460993,\n \"acc_norm_stderr\": 0.0257700156442904\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2379400260756193,\n \"acc_stderr\": 0.01087570078769424,\n \"acc_norm\": 0.2379400260756193,\n \"acc_norm_stderr\": 0.01087570078769424\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.2536764705882353,\n \"acc_stderr\": 0.026431329870789524,\n \"acc_norm\": 0.2536764705882353,\n \"acc_norm_stderr\": 0.026431329870789524\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2679738562091503,\n \"acc_stderr\": 0.017917974069594722,\n \"acc_norm\": 0.2679738562091503,\n \"acc_norm_stderr\": 0.017917974069594722\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.04389311454644286,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.04389311454644286\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.14285714285714285,\n \"acc_stderr\": 0.022401787435256386,\n \"acc_norm\": 0.14285714285714285,\n \"acc_norm_stderr\": 0.022401787435256386\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24378109452736318,\n \"acc_stderr\": 0.030360490154014645,\n \"acc_norm\": 0.24378109452736318,\n \"acc_norm_stderr\": 0.030360490154014645\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3313253012048193,\n \"acc_stderr\": 0.03664314777288087,\n \"acc_norm\": 0.3313253012048193,\n \"acc_norm_stderr\": 0.03664314777288087\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.30409356725146197,\n \"acc_stderr\": 0.03528211258245231,\n \"acc_norm\": 0.30409356725146197,\n \"acc_norm_stderr\": 0.03528211258245231\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23378212974296206,\n \"mc1_stderr\": 0.014816195991931586,\n \"mc2\": 0.37475758071242915,\n \"mc2_stderr\": 0.013911882093015021\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6124704025256511,\n \"acc_stderr\": 0.013692354636016766\n },\n \"harness|gsm8k|5\": {\n \"acc\": 
0.02350265352539803,\n \"acc_stderr\": 0.004172883669643949\n }\n}\n```", "repo_url": "https://huggingface.co/TinyLlama/TinyLlama-1.1B-Chat-v1.0", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|arc:challenge|25_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|arc:challenge|25_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|gsm8k|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|gsm8k|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hellaswag|10_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hellaswag|10_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-39-03.937670.parquet", 
"**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T11-39-03.937670.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-44-55.514182.parquet", 
"**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-44-55.514182.parquet", 
"**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-44-55.514182.parquet", 
"**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T11-44-55.514182.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-44-55.514182.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": 
"2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": 
"2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-39-03.937670.parquet"]}, 
{"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["**/details_harness|winogrande|5_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": ["**/details_harness|winogrande|5_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T11-44-55.514182.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T11_39_03.937670", "path": ["results_2024-01-04T11-39-03.937670.parquet"]}, {"split": "2024_01_04T11_44_55.514182", "path": 
["results_2024-01-04T11-44-55.514182.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T11-44-55.514182.parquet"]}]}]}
2024-01-04T11:47:09+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TinyLlama/TinyLlama-1.1B-Chat-v1.0 Dataset automatically created during the evaluation run of model TinyLlama/TinyLlama-1.1B-Chat-v1.0 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T11:44:55.514182 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
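The sentence "To load the details from a run, you can for instance do the following:" originally introduced a short snippet that is missing from this text rendering. A minimal sketch is given below; the repository id follows the `open-llm-leaderboard/details_<org>__<model>` naming convention used by the other evaluation-run cards in this collection (an assumption, since the exact id is not spelled out here), while the config name `harness_winogrande_5` and the `latest` split are taken from the config listing above.

```python
from datasets import load_dataset

# Repository id assumed from the details_<org>__<model> naming convention;
# "harness_winogrande_5" and the "latest" split appear in the configs listed above.
data = load_dataset(
    "open-llm-leaderboard/details_TinyLlama__TinyLlama-1.1B-Chat-v1.0",
    "harness_winogrande_5",
    split="latest",
)
```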
[ "# Dataset Card for Evaluation run of TinyLlama/TinyLlama-1.1B-Chat-v1.0\n\n\n\nDataset automatically created during the evaluation run of model TinyLlama/TinyLlama-1.1B-Chat-v1.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T11:44:55.514182(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TinyLlama/TinyLlama-1.1B-Chat-v1.0\n\n\n\nDataset automatically created during the evaluation run of model TinyLlama/TinyLlama-1.1B-Chat-v1.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T11:44:55.514182(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TinyLlama/TinyLlama-1.1B-Chat-v1.0\n\n\n\nDataset automatically created during the evaluation run of model TinyLlama/TinyLlama-1.1B-Chat-v1.0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T11:44:55.514182(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
e38cdacb05d96a5d71cea7c52b61bc5ab592a57a
# Dataset Card for "argilla-math-preferences-it" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
mii-llm/argilla-math-preferences-it
[ "region:us" ]
2024-01-04T11:47:07+00:00
{"dataset_info": {"features": [{"name": "input", "dtype": "string"}, {"name": "output", "dtype": "string"}, {"name": "chosen_rating", "dtype": "float64"}, {"name": "rejected", "dtype": "string"}, {"name": "rejected_rating", "dtype": "float64"}, {"name": "metadata", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 7338944, "num_examples": 2345}], "download_size": 3297769, "dataset_size": 7338944}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-30T19:21:32+00:00
[]
[]
TAGS #region-us
# Dataset Card for "argilla-math-preferences-it" More Information needed
[ "# Dataset Card for \"argilla-math-preferences-it\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"argilla-math-preferences-it\"\n\nMore Information needed" ]
[ 6, 20 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"argilla-math-preferences-it\"\n\nMore Information needed" ]
343c9398d045207e3991a01dad2a86d467edeff9
# Dataset Card for Evaluation run of jeonsworld/CarbonVillain-en-10.7B-v5 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [jeonsworld/CarbonVillain-en-10.7B-v5](https://huggingface.co/jeonsworld/CarbonVillain-en-10.7B-v5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jeonsworld__CarbonVillain-en-10.7B-v5", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T11:46:21.966260](https://huggingface.co/datasets/open-llm-leaderboard/details_jeonsworld__CarbonVillain-en-10.7B-v5/blob/main/results_2024-01-04T11-46-21.966260.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6679993758554225, "acc_stderr": 0.031636184711630856, "acc_norm": 0.6689272065570435, "acc_norm_stderr": 0.0322795610683572, "mc1": 0.5716034271725826, "mc1_stderr": 0.017323088597314747, "mc2": 0.7196929772646223, "mc2_stderr": 0.014983410075230245 }, "harness|arc:challenge|25": { "acc": 0.6851535836177475, "acc_stderr": 0.01357265770308495, "acc_norm": 0.71160409556314, "acc_norm_stderr": 0.013238394422428173 }, "harness|hellaswag|10": { "acc": 0.7148974307906791, "acc_stderr": 0.00450540617660685, "acc_norm": 0.8850826528579964, "acc_norm_stderr": 0.00318270383035113 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.75, "acc_stderr": 0.03523807393012047, "acc_norm": 0.75, "acc_norm_stderr": 0.03523807393012047 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.73, "acc_stderr": 0.04461960433384741, "acc_norm": 0.73, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.02863723563980089, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.02863723563980089 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956913, "acc_norm": 0.51, "acc_norm_stderr": 
0.05024183937956913 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6763005780346821, "acc_stderr": 0.035676037996391706, "acc_norm": 0.6763005780346821, "acc_norm_stderr": 0.035676037996391706 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6212765957446809, "acc_stderr": 0.03170995606040655, "acc_norm": 0.6212765957446809, "acc_norm_stderr": 0.03170995606040655 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6344827586206897, "acc_stderr": 0.040131241954243856, "acc_norm": 0.6344827586206897, "acc_norm_stderr": 0.040131241954243856 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.5, "acc_stderr": 0.025751310131230234, "acc_norm": 0.5, "acc_norm_stderr": 0.025751310131230234 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8258064516129032, "acc_stderr": 0.021576248184514587, "acc_norm": 0.8258064516129032, "acc_norm_stderr": 0.021576248184514587 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8121212121212121, "acc_stderr": 0.03050193405942914, "acc_norm": 0.8121212121212121, "acc_norm_stderr": 0.03050193405942914 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8686868686868687, "acc_stderr": 0.024063156416822516, "acc_norm": 0.8686868686868687, "acc_norm_stderr": 0.024063156416822516 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603348, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603348 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402534, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402534 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.36666666666666664, "acc_stderr": 0.029381620726465073, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.029381620726465073 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7142857142857143, "acc_stderr": 0.029344572500634332, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.029344572500634332 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.37748344370860926, "acc_stderr": 
0.03958027231121569, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 0.03958027231121569 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8458715596330275, "acc_stderr": 0.015480826865374308, "acc_norm": 0.8458715596330275, "acc_norm_stderr": 0.015480826865374308 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5787037037037037, "acc_stderr": 0.033674621388960775, "acc_norm": 0.5787037037037037, "acc_norm_stderr": 0.033674621388960775 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8578431372549019, "acc_stderr": 0.02450980392156862, "acc_norm": 0.8578431372549019, "acc_norm_stderr": 0.02450980392156862 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8481012658227848, "acc_stderr": 0.023363878096632446, "acc_norm": 0.8481012658227848, "acc_norm_stderr": 0.023363878096632446 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575499, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.03727673575596915, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.03727673575596915 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228733, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7668711656441718, "acc_stderr": 0.0332201579577674, "acc_norm": 0.7668711656441718, "acc_norm_stderr": 0.0332201579577674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.03492606476623791, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.03492606476623791 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.0230866350868414, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.0230866350868414 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8071519795657727, "acc_stderr": 0.014108533515757431, "acc_norm": 0.8071519795657727, "acc_norm_stderr": 0.014108533515757431 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7543352601156069, "acc_stderr": 0.023176298203992005, "acc_norm": 0.7543352601156069, "acc_norm_stderr": 0.023176298203992005 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.39776536312849164, "acc_stderr": 0.01636920497126298, "acc_norm": 0.39776536312849164, "acc_norm_stderr": 0.01636920497126298 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.761437908496732, "acc_stderr": 0.02440439492808787, "acc_norm": 0.761437908496732, "acc_norm_stderr": 0.02440439492808787 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7266881028938906, "acc_stderr": 0.025311765975426122, "acc_norm": 0.7266881028938906, "acc_norm_stderr": 0.025311765975426122 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7808641975308642, "acc_stderr": 0.02301670564026219, "acc_norm": 0.7808641975308642, "acc_norm_stderr": 0.02301670564026219 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.49645390070921985, "acc_stderr": 0.02982674915328092, "acc_norm": 0.49645390070921985, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4934810951760104, "acc_stderr": 0.012769150688867503, "acc_norm": 0.4934810951760104, "acc_norm_stderr": 0.012769150688867503 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7426470588235294, "acc_stderr": 0.02655651947004151, "acc_norm": 0.7426470588235294, "acc_norm_stderr": 0.02655651947004151 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6813725490196079, "acc_stderr": 0.018850084696468712, "acc_norm": 0.6813725490196079, "acc_norm_stderr": 0.018850084696468712 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.027979823538744546, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.027979823538744546 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.030151134457776334, "acc_norm": 0.9, "acc_norm_stderr": 0.030151134457776334 }, "harness|hendrycksTest-virology|5": { "acc": 0.5843373493975904, "acc_stderr": 0.03836722176598053, "acc_norm": 0.5843373493975904, "acc_norm_stderr": 0.03836722176598053 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03188578017686398, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03188578017686398 }, "harness|truthfulqa:mc|0": { "mc1": 0.5716034271725826, "mc1_stderr": 0.017323088597314747, "mc2": 0.7196929772646223, "mc2_stderr": 0.014983410075230245 }, "harness|winogrande|5": { "acc": 0.8334648776637726, "acc_stderr": 0.010470796496781093 }, "harness|gsm8k|5": { "acc": 0.6444275966641395, "acc_stderr": 0.013185402252713852 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_jeonsworld__CarbonVillain-en-10.7B-v5
[ "region:us" ]
2024-01-04T11:48:36+00:00
{"pretty_name": "Evaluation run of jeonsworld/CarbonVillain-en-10.7B-v5", "dataset_summary": "Dataset automatically created during the evaluation run of model [jeonsworld/CarbonVillain-en-10.7B-v5](https://huggingface.co/jeonsworld/CarbonVillain-en-10.7B-v5) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jeonsworld__CarbonVillain-en-10.7B-v5\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T11:46:21.966260](https://huggingface.co/datasets/open-llm-leaderboard/details_jeonsworld__CarbonVillain-en-10.7B-v5/blob/main/results_2024-01-04T11-46-21.966260.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6679993758554225,\n \"acc_stderr\": 0.031636184711630856,\n \"acc_norm\": 0.6689272065570435,\n \"acc_norm_stderr\": 0.0322795610683572,\n \"mc1\": 0.5716034271725826,\n \"mc1_stderr\": 0.017323088597314747,\n \"mc2\": 0.7196929772646223,\n \"mc2_stderr\": 0.014983410075230245\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6851535836177475,\n \"acc_stderr\": 0.01357265770308495,\n \"acc_norm\": 0.71160409556314,\n \"acc_norm_stderr\": 0.013238394422428173\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7148974307906791,\n \"acc_stderr\": 0.00450540617660685,\n \"acc_norm\": 0.8850826528579964,\n \"acc_norm_stderr\": 0.00318270383035113\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03523807393012047,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03523807393012047\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.02863723563980089,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.02863723563980089\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n 
\"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956913,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956913\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6763005780346821,\n \"acc_stderr\": 0.035676037996391706,\n \"acc_norm\": 0.6763005780346821,\n \"acc_norm_stderr\": 0.035676037996391706\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6212765957446809,\n \"acc_stderr\": 0.03170995606040655,\n \"acc_norm\": 0.6212765957446809,\n \"acc_norm_stderr\": 0.03170995606040655\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6344827586206897,\n \"acc_stderr\": 0.040131241954243856,\n \"acc_norm\": 0.6344827586206897,\n \"acc_norm_stderr\": 0.040131241954243856\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.025751310131230234,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.025751310131230234\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8258064516129032,\n \"acc_stderr\": 0.021576248184514587,\n \"acc_norm\": 0.8258064516129032,\n \"acc_norm_stderr\": 0.021576248184514587\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8121212121212121,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.8121212121212121,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8686868686868687,\n \"acc_stderr\": 0.024063156416822516,\n \"acc_norm\": 0.8686868686868687,\n \"acc_norm_stderr\": 0.024063156416822516\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603348,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603348\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n 
\"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.023901157979402534,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402534\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.36666666666666664,\n \"acc_stderr\": 0.029381620726465073,\n \"acc_norm\": 0.36666666666666664,\n \"acc_norm_stderr\": 0.029381620726465073\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.029344572500634332,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.029344572500634332\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.03958027231121569,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.03958027231121569\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374308,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374308\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5787037037037037,\n \"acc_stderr\": 0.033674621388960775,\n \"acc_norm\": 0.5787037037037037,\n \"acc_norm_stderr\": 0.033674621388960775\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8578431372549019,\n \"acc_stderr\": 0.02450980392156862,\n \"acc_norm\": 0.8578431372549019,\n \"acc_norm_stderr\": 0.02450980392156862\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8481012658227848,\n \"acc_stderr\": 0.023363878096632446,\n \"acc_norm\": 0.8481012658227848,\n \"acc_norm_stderr\": 0.023363878096632446\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596915,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596915\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.03492606476623791,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.03492606476623791\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.0230866350868414,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.0230866350868414\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8071519795657727,\n \"acc_stderr\": 0.014108533515757431,\n 
\"acc_norm\": 0.8071519795657727,\n \"acc_norm_stderr\": 0.014108533515757431\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7543352601156069,\n \"acc_stderr\": 0.023176298203992005,\n \"acc_norm\": 0.7543352601156069,\n \"acc_norm_stderr\": 0.023176298203992005\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.39776536312849164,\n \"acc_stderr\": 0.01636920497126298,\n \"acc_norm\": 0.39776536312849164,\n \"acc_norm_stderr\": 0.01636920497126298\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.761437908496732,\n \"acc_stderr\": 0.02440439492808787,\n \"acc_norm\": 0.761437908496732,\n \"acc_norm_stderr\": 0.02440439492808787\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7266881028938906,\n \"acc_stderr\": 0.025311765975426122,\n \"acc_norm\": 0.7266881028938906,\n \"acc_norm_stderr\": 0.025311765975426122\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7808641975308642,\n \"acc_stderr\": 0.02301670564026219,\n \"acc_norm\": 0.7808641975308642,\n \"acc_norm_stderr\": 0.02301670564026219\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.49645390070921985,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.49645390070921985,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4934810951760104,\n \"acc_stderr\": 0.012769150688867503,\n \"acc_norm\": 0.4934810951760104,\n \"acc_norm_stderr\": 0.012769150688867503\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7426470588235294,\n \"acc_stderr\": 0.02655651947004151,\n \"acc_norm\": 0.7426470588235294,\n \"acc_norm_stderr\": 0.02655651947004151\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6813725490196079,\n \"acc_stderr\": 0.018850084696468712,\n \"acc_norm\": 0.6813725490196079,\n \"acc_norm_stderr\": 0.018850084696468712\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.027979823538744546,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.027979823538744546\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776334,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776334\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5843373493975904,\n \"acc_stderr\": 0.03836722176598053,\n \"acc_norm\": 0.5843373493975904,\n \"acc_norm_stderr\": 0.03836722176598053\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03188578017686398,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03188578017686398\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5716034271725826,\n \"mc1_stderr\": 0.017323088597314747,\n \"mc2\": 0.7196929772646223,\n \"mc2_stderr\": 0.014983410075230245\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8334648776637726,\n \"acc_stderr\": 0.010470796496781093\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6444275966641395,\n \"acc_stderr\": 0.013185402252713852\n }\n}\n```", "repo_url": 
"https://huggingface.co/jeonsworld/CarbonVillain-en-10.7B-v5", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|arc:challenge|25_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|gsm8k|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hellaswag|10_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-46-21.966260.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-46-21.966260.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-46-21.966260.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T11-46-21.966260.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-46-21.966260.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T11_46_21.966260", "path": ["**/details_harness|winogrande|5_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T11-46-21.966260.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T11_46_21.966260", "path": ["results_2024-01-04T11-46-21.966260.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T11-46-21.966260.parquet"]}]}]}
2024-01-04T11:48:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jeonsworld/CarbonVillain-en-10.7B-v5 Dataset automatically created during the evaluation run of model jeonsworld/CarbonVillain-en-10.7B-v5 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T11:46:21.966260 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
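The load snippet referenced in the card text above was stripped from this processed field; below is a minimal sketch of what it would look like, assuming the usual Open LLM Leaderboard naming for the details repository (details_<org>__<model>, which is not spelled out in this field) and the config/split names listed in this record's metadata:

```python
from datasets import load_dataset

# Assumed repo id, following the details_<org>__<model> pattern used by the
# other records in this dump; verify against this record's repo_url field.
data = load_dataset(
    "open-llm-leaderboard/details_jeonsworld__CarbonVillain-en-10.7B-v5",
    "harness_winogrande_5",  # one of the 63 configs listed in the metadata
    split="latest",          # alias of the timestamped run 2024_01_04T11_46_21.966260
)
print(data)
```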
[ "# Dataset Card for Evaluation run of jeonsworld/CarbonVillain-en-10.7B-v5\n\n\n\nDataset automatically created during the evaluation run of model jeonsworld/CarbonVillain-en-10.7B-v5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T11:46:21.966260(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jeonsworld/CarbonVillain-en-10.7B-v5\n\n\n\nDataset automatically created during the evaluation run of model jeonsworld/CarbonVillain-en-10.7B-v5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T11:46:21.966260(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jeonsworld/CarbonVillain-en-10.7B-v5\n\n\n\nDataset automatically created during the evaluation run of model jeonsworld/CarbonVillain-en-10.7B-v5 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T11:46:21.966260(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
966e343a972434114a7883ad658632c0835b9e80
# Dataset Card for Evaluation run of SanjiWatsuki/Lelantos-7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [SanjiWatsuki/Lelantos-7B](https://huggingface.co/SanjiWatsuki/Lelantos-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_SanjiWatsuki__Lelantos-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T11:51:45.289850](https://huggingface.co/datasets/open-llm-leaderboard/details_SanjiWatsuki__Lelantos-7B/blob/main/results_2024-01-04T11-51-45.289850.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6459625636151246, "acc_stderr": 0.03219655588014755, "acc_norm": 0.6456793887410595, "acc_norm_stderr": 0.03286123598105045, "mc1": 0.4773561811505508, "mc1_stderr": 0.01748554225848965, "mc2": 0.6518191080402123, "mc2_stderr": 0.015251041012498917 }, "harness|arc:challenge|25": { "acc": 0.6621160409556314, "acc_stderr": 0.013822047922283512, "acc_norm": 0.6902730375426621, "acc_norm_stderr": 0.013512058415238363 }, "harness|hellaswag|10": { "acc": 0.6910973909579765, "acc_stderr": 0.004610966122378294, "acc_norm": 0.8690499900418244, "acc_norm_stderr": 0.0033665623627415564 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5925925925925926, "acc_stderr": 0.04244633238353227, "acc_norm": 0.5925925925925926, "acc_norm_stderr": 0.04244633238353227 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7171052631578947, "acc_stderr": 0.03665349695640767, "acc_norm": 0.7171052631578947, "acc_norm_stderr": 0.03665349695640767 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6716981132075471, "acc_stderr": 0.02890159361241178, "acc_norm": 0.6716981132075471, "acc_norm_stderr": 0.02890159361241178 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7291666666666666, "acc_stderr": 0.03716177437566018, "acc_norm": 0.7291666666666666, "acc_norm_stderr": 0.03716177437566018 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6242774566473989, "acc_stderr": 0.036928207672648664, "acc_norm": 0.6242774566473989, "acc_norm_stderr": 0.036928207672648664 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4019607843137255, "acc_stderr": 0.04878608714466996, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.04878608714466996 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5574468085106383, "acc_stderr": 0.03246956919789958, "acc_norm": 0.5574468085106383, "acc_norm_stderr": 0.03246956919789958 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555498, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555498 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.02548718714785938, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.02548718714785938 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7870967741935484, "acc_stderr": 0.02328766512726855, "acc_norm": 0.7870967741935484, "acc_norm_stderr": 0.02328766512726855 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.035179450386910616, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.035179450386910616 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7575757575757576, "acc_stderr": 0.03346409881055953, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7777777777777778, "acc_stderr": 0.02962022787479049, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.02962022787479049 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033456, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033456 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6435897435897436, "acc_stderr": 0.024283140529467305, "acc_norm": 0.6435897435897436, "acc_norm_stderr": 0.024283140529467305 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3296296296296296, "acc_stderr": 0.028661201116524565, "acc_norm": 0.3296296296296296, "acc_norm_stderr": 0.028661201116524565 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6848739495798319, "acc_stderr": 0.030176808288974337, "acc_norm": 0.6848739495798319, "acc_norm_stderr": 0.030176808288974337 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31125827814569534, "acc_stderr": 
0.03780445850526732, "acc_norm": 0.31125827814569534, "acc_norm_stderr": 0.03780445850526732 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8403669724770643, "acc_stderr": 0.015703498348461763, "acc_norm": 0.8403669724770643, "acc_norm_stderr": 0.015703498348461763 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5231481481481481, "acc_stderr": 0.03406315360711507, "acc_norm": 0.5231481481481481, "acc_norm_stderr": 0.03406315360711507 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8137254901960784, "acc_stderr": 0.027325470966716312, "acc_norm": 0.8137254901960784, "acc_norm_stderr": 0.027325470966716312 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8185654008438819, "acc_stderr": 0.02508596114457966, "acc_norm": 0.8185654008438819, "acc_norm_stderr": 0.02508596114457966 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8015267175572519, "acc_stderr": 0.034981493854624734, "acc_norm": 0.8015267175572519, "acc_norm_stderr": 0.034981493854624734 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243838, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243838 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7852760736196319, "acc_stderr": 0.032262193772867744, "acc_norm": 0.7852760736196319, "acc_norm_stderr": 0.032262193772867744 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.49107142857142855, "acc_stderr": 0.04745033255489123, "acc_norm": 0.49107142857142855, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8275862068965517, "acc_stderr": 0.013507943909371803, "acc_norm": 0.8275862068965517, "acc_norm_stderr": 0.013507943909371803 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7196531791907514, "acc_stderr": 0.024182427496577615, "acc_norm": 0.7196531791907514, "acc_norm_stderr": 0.024182427496577615 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.40670391061452515, "acc_stderr": 0.016428811915898865, "acc_norm": 0.40670391061452515, "acc_norm_stderr": 0.016428811915898865 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7352941176470589, "acc_stderr": 0.02526169121972948, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.02526169121972948 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7234726688102894, "acc_stderr": 0.025403832978179615, "acc_norm": 0.7234726688102894, "acc_norm_stderr": 0.025403832978179615 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7407407407407407, "acc_stderr": 0.024383665531035457, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.024383665531035457 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4929078014184397, "acc_stderr": 0.02982449855912901, "acc_norm": 0.4929078014184397, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4706649282920469, "acc_stderr": 0.012748238397365549, "acc_norm": 0.4706649282920469, "acc_norm_stderr": 0.012748238397365549 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6948529411764706, "acc_stderr": 0.027971541370170598, "acc_norm": 0.6948529411764706, "acc_norm_stderr": 0.027971541370170598 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6830065359477124, "acc_stderr": 0.018824219512706207, "acc_norm": 0.6830065359477124, "acc_norm_stderr": 0.018824219512706207 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.02812342933514278, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.02812342933514278 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8059701492537313, "acc_stderr": 0.027962677604768917, "acc_norm": 0.8059701492537313, "acc_norm_stderr": 0.027962677604768917 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699121, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699121 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.4773561811505508, "mc1_stderr": 0.01748554225848965, "mc2": 0.6518191080402123, "mc2_stderr": 0.015251041012498917 }, "harness|winogrande|5": { "acc": 0.8066298342541437, "acc_stderr": 0.011099796645920533 }, "harness|gsm8k|5": { "acc": 0.7081122062168309, "acc_stderr": 0.012522795894420867 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
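Beyond the aggregated numbers in the card above, the per-example details behind any single score can be pulled from the same repository. A short sketch, assuming the Hugging Face `datasets` library and using a config/split name that appears in this record's metadata (the column layout of the details parquet files is not documented in the card, so the inspection step is only illustrative):

```python
from datasets import load_dataset

# Config and split names are taken from this record's metadata block;
# "latest" points at the single timestamped run 2024_01_04T11_51_45.289850.
details = load_dataset(
    "open-llm-leaderboard/details_SanjiWatsuki__Lelantos-7B",
    "harness_gsm8k_5",
    split="latest",
)
print(details)     # row count and column names
print(details[0])  # first evaluated example (exact schema not shown in this card)
```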
open-llm-leaderboard/details_SanjiWatsuki__Lelantos-7B
[ "region:us" ]
2024-01-04T11:54:01+00:00
{"pretty_name": "Evaluation run of SanjiWatsuki/Lelantos-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [SanjiWatsuki/Lelantos-7B](https://huggingface.co/SanjiWatsuki/Lelantos-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_SanjiWatsuki__Lelantos-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T11:51:45.289850](https://huggingface.co/datasets/open-llm-leaderboard/details_SanjiWatsuki__Lelantos-7B/blob/main/results_2024-01-04T11-51-45.289850.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6459625636151246,\n \"acc_stderr\": 0.03219655588014755,\n \"acc_norm\": 0.6456793887410595,\n \"acc_norm_stderr\": 0.03286123598105045,\n \"mc1\": 0.4773561811505508,\n \"mc1_stderr\": 0.01748554225848965,\n \"mc2\": 0.6518191080402123,\n \"mc2_stderr\": 0.015251041012498917\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6621160409556314,\n \"acc_stderr\": 0.013822047922283512,\n \"acc_norm\": 0.6902730375426621,\n \"acc_norm_stderr\": 0.013512058415238363\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6910973909579765,\n \"acc_stderr\": 0.004610966122378294,\n \"acc_norm\": 0.8690499900418244,\n \"acc_norm_stderr\": 0.0033665623627415564\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5925925925925926,\n \"acc_stderr\": 0.04244633238353227,\n \"acc_norm\": 0.5925925925925926,\n \"acc_norm_stderr\": 0.04244633238353227\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7171052631578947,\n \"acc_stderr\": 0.03665349695640767,\n \"acc_norm\": 0.7171052631578947,\n \"acc_norm_stderr\": 0.03665349695640767\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6716981132075471,\n \"acc_stderr\": 0.02890159361241178,\n \"acc_norm\": 0.6716981132075471,\n \"acc_norm_stderr\": 0.02890159361241178\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7291666666666666,\n \"acc_stderr\": 0.03716177437566018,\n \"acc_norm\": 0.7291666666666666,\n \"acc_norm_stderr\": 0.03716177437566018\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 
0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6242774566473989,\n \"acc_stderr\": 0.036928207672648664,\n \"acc_norm\": 0.6242774566473989,\n \"acc_norm_stderr\": 0.036928207672648664\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.04878608714466996,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.04878608714466996\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5574468085106383,\n \"acc_stderr\": 0.03246956919789958,\n \"acc_norm\": 0.5574468085106383,\n \"acc_norm_stderr\": 0.03246956919789958\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555498,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555498\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.02548718714785938,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.02548718714785938\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7870967741935484,\n \"acc_stderr\": 0.02328766512726855,\n \"acc_norm\": 0.7870967741935484,\n \"acc_norm_stderr\": 0.02328766512726855\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.02962022787479049,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.02962022787479049\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033456,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033456\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6435897435897436,\n \"acc_stderr\": 0.024283140529467305,\n \"acc_norm\": 0.6435897435897436,\n \"acc_norm_stderr\": 0.024283140529467305\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3296296296296296,\n \"acc_stderr\": 0.028661201116524565,\n \"acc_norm\": 0.3296296296296296,\n \"acc_norm_stderr\": 0.028661201116524565\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6848739495798319,\n \"acc_stderr\": 0.030176808288974337,\n \"acc_norm\": 0.6848739495798319,\n \"acc_norm_stderr\": 0.030176808288974337\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31125827814569534,\n \"acc_stderr\": 0.03780445850526732,\n \"acc_norm\": 0.31125827814569534,\n \"acc_norm_stderr\": 0.03780445850526732\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8403669724770643,\n \"acc_stderr\": 0.015703498348461763,\n \"acc_norm\": 0.8403669724770643,\n \"acc_norm_stderr\": 0.015703498348461763\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5231481481481481,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.5231481481481481,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8137254901960784,\n \"acc_stderr\": 0.027325470966716312,\n \"acc_norm\": 0.8137254901960784,\n \"acc_norm_stderr\": 0.027325470966716312\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8185654008438819,\n \"acc_stderr\": 0.02508596114457966,\n \"acc_norm\": 0.8185654008438819,\n \"acc_norm_stderr\": 0.02508596114457966\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.034981493854624734,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.034981493854624734\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243838,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243838\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7852760736196319,\n \"acc_stderr\": 0.032262193772867744,\n \"acc_norm\": 0.7852760736196319,\n \"acc_norm_stderr\": 0.032262193772867744\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8275862068965517,\n \"acc_stderr\": 0.013507943909371803,\n \"acc_norm\": 0.8275862068965517,\n \"acc_norm_stderr\": 0.013507943909371803\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7196531791907514,\n \"acc_stderr\": 0.024182427496577615,\n \"acc_norm\": 0.7196531791907514,\n \"acc_norm_stderr\": 0.024182427496577615\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.40670391061452515,\n \"acc_stderr\": 0.016428811915898865,\n \"acc_norm\": 0.40670391061452515,\n \"acc_norm_stderr\": 0.016428811915898865\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.02526169121972948,\n \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.02526169121972948\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7234726688102894,\n \"acc_stderr\": 0.025403832978179615,\n \"acc_norm\": 0.7234726688102894,\n \"acc_norm_stderr\": 0.025403832978179615\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.024383665531035457,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.024383665531035457\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4706649282920469,\n \"acc_stderr\": 0.012748238397365549,\n \"acc_norm\": 0.4706649282920469,\n \"acc_norm_stderr\": 0.012748238397365549\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6948529411764706,\n \"acc_stderr\": 0.027971541370170598,\n \"acc_norm\": 0.6948529411764706,\n \"acc_norm_stderr\": 0.027971541370170598\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6830065359477124,\n \"acc_stderr\": 0.018824219512706207,\n \"acc_norm\": 0.6830065359477124,\n \"acc_norm_stderr\": 0.018824219512706207\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.02812342933514278,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.02812342933514278\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8059701492537313,\n \"acc_stderr\": 0.027962677604768917,\n \"acc_norm\": 0.8059701492537313,\n \"acc_norm_stderr\": 0.027962677604768917\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699121,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699121\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4773561811505508,\n \"mc1_stderr\": 0.01748554225848965,\n \"mc2\": 0.6518191080402123,\n \"mc2_stderr\": 0.015251041012498917\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8066298342541437,\n \"acc_stderr\": 0.011099796645920533\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7081122062168309,\n \"acc_stderr\": 
0.012522795894420867\n }\n}\n```", "repo_url": "https://huggingface.co/SanjiWatsuki/Lelantos-7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|arc:challenge|25_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|gsm8k|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hellaswag|10_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-51-45.289850.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-51-45.289850.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-51-45.289850.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T11-51-45.289850.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-51-45.289850.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T11_51_45.289850", "path": ["**/details_harness|winogrande|5_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T11-51-45.289850.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T11_51_45.289850", "path": ["results_2024-01-04T11-51-45.289850.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T11-51-45.289850.parquet"]}]}]}
2024-01-04T11:54:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of SanjiWatsuki/Lelantos-7B Dataset automatically created during the evaluation run of model SanjiWatsuki/Lelantos-7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T11:51:45.289850 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
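The flattened card above ends the sentence "To load the details from a run, you can for instance do the following:" without the code block that normally follows it. Below is a minimal sketch of that loading step: the config name "harness_winogrande_5" and the "train" split are taken from this record (the metadata above and the card wording), while the repository id is an assumption based on the usual `open-llm-leaderboard/details_<org>__<model>` naming, not a value quoted from this record.

```python
# Minimal sketch (not part of the original card): load one evaluation config.
# The repository id below is assumed from the usual details_<org>__<model> pattern;
# "harness_winogrande_5" appears in this record's metadata listing.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_SanjiWatsuki__Lelantos-7B",
    "harness_winogrande_5",
    split="train",  # per the card, "train" always points to the latest results
)
print(data)
```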
[ "# Dataset Card for Evaluation run of SanjiWatsuki/Lelantos-7B\n\n\n\nDataset automatically created during the evaluation run of model SanjiWatsuki/Lelantos-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T11:51:45.289850(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of SanjiWatsuki/Lelantos-7B\n\n\n\nDataset automatically created during the evaluation run of model SanjiWatsuki/Lelantos-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T11:51:45.289850(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 183, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of SanjiWatsuki/Lelantos-7B\n\n\n\nDataset automatically created during the evaluation run of model SanjiWatsuki/Lelantos-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T11:51:45.289850(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
3a31db4b84efeaa6fc2fa009c6d4da725111b844
# Dataset Card for Evaluation run of perlthoughts/openchat-3.5-1210-32k <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [perlthoughts/openchat-3.5-1210-32k](https://huggingface.co/perlthoughts/openchat-3.5-1210-32k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_perlthoughts__openchat-3.5-1210-32k", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T11:53:58.629434](https://huggingface.co/datasets/open-llm-leaderboard/details_perlthoughts__openchat-3.5-1210-32k/blob/main/results_2024-01-04T11-53-58.629434.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6166284571376112, "acc_stderr": 0.03270043011809276, "acc_norm": 0.620202033653911, "acc_norm_stderr": 0.033356706588459746, "mc1": 0.3292533659730722, "mc1_stderr": 0.016451264440068232, "mc2": 0.4931442577289458, "mc2_stderr": 0.015404462724680936 }, "harness|arc:challenge|25": { "acc": 0.5972696245733788, "acc_stderr": 0.01433223630679015, "acc_norm": 0.6467576791808873, "acc_norm_stderr": 0.013967822714840055 }, "harness|hellaswag|10": { "acc": 0.6394144592710616, "acc_stderr": 0.004791890625834195, "acc_norm": 0.8405696076478789, "acc_norm_stderr": 0.0036532880435558007 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6, "acc_stderr": 0.04232073695151589, "acc_norm": 0.6, "acc_norm_stderr": 0.04232073695151589 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6754716981132075, "acc_stderr": 0.028815615713432108, "acc_norm": 0.6754716981132075, "acc_norm_stderr": 0.028815615713432108 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6944444444444444, "acc_stderr": 0.03852084696008534, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.03852084696008534 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 
0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6011560693641619, "acc_stderr": 0.037336266553835096, "acc_norm": 0.6011560693641619, "acc_norm_stderr": 0.037336266553835096 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.30392156862745096, "acc_stderr": 0.04576665403207762, "acc_norm": 0.30392156862745096, "acc_norm_stderr": 0.04576665403207762 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5319148936170213, "acc_stderr": 0.03261936918467383, "acc_norm": 0.5319148936170213, "acc_norm_stderr": 0.03261936918467383 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3783068783068783, "acc_stderr": 0.02497695405315525, "acc_norm": 0.3783068783068783, "acc_norm_stderr": 0.02497695405315525 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7741935483870968, "acc_stderr": 0.023785577884181015, "acc_norm": 0.7741935483870968, "acc_norm_stderr": 0.023785577884181015 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4729064039408867, "acc_stderr": 0.03512819077876105, "acc_norm": 0.4729064039408867, "acc_norm_stderr": 0.03512819077876105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.61, "acc_stderr": 0.04902071300001974, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7636363636363637, "acc_stderr": 0.03317505930009181, "acc_norm": 0.7636363636363637, "acc_norm_stderr": 0.03317505930009181 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7070707070707071, "acc_stderr": 0.03242497958178815, "acc_norm": 0.7070707070707071, "acc_norm_stderr": 0.03242497958178815 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8704663212435233, "acc_stderr": 0.024233532297758733, "acc_norm": 0.8704663212435233, "acc_norm_stderr": 0.024233532297758733 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563976, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563976 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.28888888888888886, "acc_stderr": 0.027634907264178544, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.027634907264178544 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6134453781512605, "acc_stderr": 0.03163145807552379, "acc_norm": 0.6134453781512605, "acc_norm_stderr": 0.03163145807552379 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2781456953642384, 
"acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8293577981651377, "acc_stderr": 0.01612927102509986, "acc_norm": 0.8293577981651377, "acc_norm_stderr": 0.01612927102509986 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.47685185185185186, "acc_stderr": 0.03406315360711507, "acc_norm": 0.47685185185185186, "acc_norm_stderr": 0.03406315360711507 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.803921568627451, "acc_stderr": 0.027865942286639325, "acc_norm": 0.803921568627451, "acc_norm_stderr": 0.027865942286639325 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7932489451476793, "acc_stderr": 0.0263616516683891, "acc_norm": 0.7932489451476793, "acc_norm_stderr": 0.0263616516683891 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575499, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.732824427480916, "acc_stderr": 0.038808483010823944, "acc_norm": 0.732824427480916, "acc_norm_stderr": 0.038808483010823944 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.036401182719909476, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.036401182719909476 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.043733130409147614, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7300613496932515, "acc_stderr": 0.034878251684978906, "acc_norm": 0.7300613496932515, "acc_norm_stderr": 0.034878251684978906 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5089285714285714, "acc_stderr": 0.04745033255489123, "acc_norm": 0.5089285714285714, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.039891398595317706, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.039891398595317706 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8504273504273504, "acc_stderr": 0.023365051491753715, "acc_norm": 0.8504273504273504, "acc_norm_stderr": 0.023365051491753715 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7956577266922095, "acc_stderr": 0.014419123980931894, "acc_norm": 0.7956577266922095, "acc_norm_stderr": 0.014419123980931894 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7138728323699421, "acc_stderr": 0.02433214677913413, "acc_norm": 0.7138728323699421, "acc_norm_stderr": 0.02433214677913413 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.32737430167597764, "acc_stderr": 0.015694238967737386, "acc_norm": 0.32737430167597764, "acc_norm_stderr": 0.015694238967737386 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6928104575163399, "acc_stderr": 0.026415601914388992, "acc_norm": 0.6928104575163399, "acc_norm_stderr": 0.026415601914388992 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6784565916398714, "acc_stderr": 0.026527724079528872, "acc_norm": 0.6784565916398714, "acc_norm_stderr": 0.026527724079528872 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7222222222222222, "acc_stderr": 0.024922001168886335, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.024922001168886335 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4397163120567376, "acc_stderr": 0.02960991207559411, "acc_norm": 0.4397163120567376, "acc_norm_stderr": 0.02960991207559411 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.455019556714472, "acc_stderr": 0.012718456618701782, "acc_norm": 0.455019556714472, "acc_norm_stderr": 0.012718456618701782 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6654411764705882, "acc_stderr": 0.028661996202335307, "acc_norm": 0.6654411764705882, "acc_norm_stderr": 0.028661996202335307 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6290849673202614, "acc_stderr": 0.01954210156485412, "acc_norm": 0.6290849673202614, "acc_norm_stderr": 0.01954210156485412 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6979591836734694, "acc_stderr": 0.029393609319879804, "acc_norm": 0.6979591836734694, "acc_norm_stderr": 0.029393609319879804 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8009950248756219, "acc_stderr": 0.028231365092758406, "acc_norm": 0.8009950248756219, "acc_norm_stderr": 0.028231365092758406 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.03588702812826371, "acc_norm": 0.85, "acc_norm_stderr": 0.03588702812826371 }, "harness|hendrycksTest-virology|5": { "acc": 0.4879518072289157, "acc_stderr": 0.0389136449583582, "acc_norm": 0.4879518072289157, "acc_norm_stderr": 0.0389136449583582 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8128654970760234, "acc_stderr": 0.02991312723236804, "acc_norm": 0.8128654970760234, "acc_norm_stderr": 0.02991312723236804 }, "harness|truthfulqa:mc|0": { "mc1": 0.3292533659730722, "mc1_stderr": 0.016451264440068232, "mc2": 0.4931442577289458, "mc2_stderr": 0.015404462724680936 }, "harness|winogrande|5": { "acc": 0.7916337805840569, "acc_stderr": 0.011414554399987729 }, "harness|gsm8k|5": { "acc": 0.48142532221379836, "acc_stderr": 0.013762977910317583 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
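The card above also points at the aggregated metrics through the "results" configuration and notes that the "latest" split always tracks the most recent run; the "latest" split name appears throughout this record's metadata, while the exact schema of the aggregated rows is not documented here. A short sketch of pulling those aggregates out, which therefore only loads and inspects the data:

```python
# Minimal sketch: load the aggregated results of the latest evaluation run.
# "results" (config) is named in the card text and "latest" (split) in the metadata;
# the column layout of the returned rows is not specified in this record, so we just print it.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_perlthoughts__openchat-3.5-1210-32k",
    "results",
    split="latest",
)
print(results)      # number of rows and column names
print(results[0])   # first aggregated-results row
```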
open-llm-leaderboard/details_perlthoughts__openchat-3.5-1210-32k
[ "region:us" ]
2024-01-04T11:56:17+00:00
{"pretty_name": "Evaluation run of perlthoughts/openchat-3.5-1210-32k", "dataset_summary": "Dataset automatically created during the evaluation run of model [perlthoughts/openchat-3.5-1210-32k](https://huggingface.co/perlthoughts/openchat-3.5-1210-32k) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_perlthoughts__openchat-3.5-1210-32k\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T11:53:58.629434](https://huggingface.co/datasets/open-llm-leaderboard/details_perlthoughts__openchat-3.5-1210-32k/blob/main/results_2024-01-04T11-53-58.629434.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6166284571376112,\n \"acc_stderr\": 0.03270043011809276,\n \"acc_norm\": 0.620202033653911,\n \"acc_norm_stderr\": 0.033356706588459746,\n \"mc1\": 0.3292533659730722,\n \"mc1_stderr\": 0.016451264440068232,\n \"mc2\": 0.4931442577289458,\n \"mc2_stderr\": 0.015404462724680936\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5972696245733788,\n \"acc_stderr\": 0.01433223630679015,\n \"acc_norm\": 0.6467576791808873,\n \"acc_norm_stderr\": 0.013967822714840055\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6394144592710616,\n \"acc_stderr\": 0.004791890625834195,\n \"acc_norm\": 0.8405696076478789,\n \"acc_norm_stderr\": 0.0036532880435558007\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04232073695151589,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04232073695151589\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6754716981132075,\n \"acc_stderr\": 0.028815615713432108,\n \"acc_norm\": 0.6754716981132075,\n \"acc_norm_stderr\": 0.028815615713432108\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.03852084696008534,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.03852084696008534\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.44,\n 
\"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6011560693641619,\n \"acc_stderr\": 0.037336266553835096,\n \"acc_norm\": 0.6011560693641619,\n \"acc_norm_stderr\": 0.037336266553835096\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.30392156862745096,\n \"acc_stderr\": 0.04576665403207762,\n \"acc_norm\": 0.30392156862745096,\n \"acc_norm_stderr\": 0.04576665403207762\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5319148936170213,\n \"acc_stderr\": 0.03261936918467383,\n \"acc_norm\": 0.5319148936170213,\n \"acc_norm_stderr\": 0.03261936918467383\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3783068783068783,\n \"acc_stderr\": 0.02497695405315525,\n \"acc_norm\": 0.3783068783068783,\n \"acc_norm_stderr\": 0.02497695405315525\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7741935483870968,\n \"acc_stderr\": 0.023785577884181015,\n \"acc_norm\": 0.7741935483870968,\n \"acc_norm_stderr\": 0.023785577884181015\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4729064039408867,\n \"acc_stderr\": 0.03512819077876105,\n \"acc_norm\": 0.4729064039408867,\n \"acc_norm_stderr\": 0.03512819077876105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.03317505930009181,\n \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.03317505930009181\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7070707070707071,\n \"acc_stderr\": 0.03242497958178815,\n \"acc_norm\": 0.7070707070707071,\n \"acc_norm_stderr\": 0.03242497958178815\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8704663212435233,\n \"acc_stderr\": 0.024233532297758733,\n \"acc_norm\": 0.8704663212435233,\n \"acc_norm_stderr\": 0.024233532297758733\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.023946724741563976,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563976\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.28888888888888886,\n \"acc_stderr\": 0.027634907264178544,\n \"acc_norm\": 0.28888888888888886,\n \"acc_norm_stderr\": 0.027634907264178544\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6134453781512605,\n \"acc_stderr\": 0.03163145807552379,\n \"acc_norm\": 0.6134453781512605,\n \"acc_norm_stderr\": 0.03163145807552379\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2781456953642384,\n \"acc_stderr\": 0.03658603262763743,\n \"acc_norm\": 0.2781456953642384,\n \"acc_norm_stderr\": 0.03658603262763743\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8293577981651377,\n \"acc_stderr\": 0.01612927102509986,\n \"acc_norm\": 0.8293577981651377,\n \"acc_norm_stderr\": 0.01612927102509986\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.47685185185185186,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.47685185185185186,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.803921568627451,\n \"acc_stderr\": 0.027865942286639325,\n \"acc_norm\": 0.803921568627451,\n \"acc_norm_stderr\": 0.027865942286639325\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7932489451476793,\n \"acc_stderr\": 0.0263616516683891,\n \"acc_norm\": 0.7932489451476793,\n \"acc_norm_stderr\": 0.0263616516683891\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.732824427480916,\n \"acc_stderr\": 0.038808483010823944,\n \"acc_norm\": 0.732824427480916,\n \"acc_norm_stderr\": 0.038808483010823944\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.036401182719909476,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.036401182719909476\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7300613496932515,\n \"acc_stderr\": 0.034878251684978906,\n \"acc_norm\": 0.7300613496932515,\n \"acc_norm_stderr\": 0.034878251684978906\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.5089285714285714,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8504273504273504,\n \"acc_stderr\": 0.023365051491753715,\n \"acc_norm\": 0.8504273504273504,\n \"acc_norm_stderr\": 0.023365051491753715\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7956577266922095,\n \"acc_stderr\": 0.014419123980931894,\n \"acc_norm\": 0.7956577266922095,\n \"acc_norm_stderr\": 0.014419123980931894\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7138728323699421,\n \"acc_stderr\": 0.02433214677913413,\n \"acc_norm\": 0.7138728323699421,\n \"acc_norm_stderr\": 0.02433214677913413\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.32737430167597764,\n \"acc_stderr\": 0.015694238967737386,\n \"acc_norm\": 0.32737430167597764,\n \"acc_norm_stderr\": 0.015694238967737386\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6928104575163399,\n \"acc_stderr\": 0.026415601914388992,\n \"acc_norm\": 0.6928104575163399,\n \"acc_norm_stderr\": 0.026415601914388992\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6784565916398714,\n \"acc_stderr\": 0.026527724079528872,\n \"acc_norm\": 0.6784565916398714,\n \"acc_norm_stderr\": 0.026527724079528872\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.024922001168886335,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.024922001168886335\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4397163120567376,\n \"acc_stderr\": 0.02960991207559411,\n \"acc_norm\": 0.4397163120567376,\n \"acc_norm_stderr\": 0.02960991207559411\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.455019556714472,\n \"acc_stderr\": 0.012718456618701782,\n \"acc_norm\": 0.455019556714472,\n \"acc_norm_stderr\": 0.012718456618701782\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6654411764705882,\n \"acc_stderr\": 0.028661996202335307,\n \"acc_norm\": 0.6654411764705882,\n \"acc_norm_stderr\": 0.028661996202335307\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6290849673202614,\n \"acc_stderr\": 0.01954210156485412,\n \"acc_norm\": 0.6290849673202614,\n \"acc_norm_stderr\": 0.01954210156485412\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6979591836734694,\n \"acc_stderr\": 0.029393609319879804,\n \"acc_norm\": 0.6979591836734694,\n \"acc_norm_stderr\": 0.029393609319879804\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8009950248756219,\n \"acc_stderr\": 0.028231365092758406,\n \"acc_norm\": 0.8009950248756219,\n \"acc_norm_stderr\": 0.028231365092758406\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.03588702812826371,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.03588702812826371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4879518072289157,\n \"acc_stderr\": 0.0389136449583582,\n \"acc_norm\": 0.4879518072289157,\n \"acc_norm_stderr\": 0.0389136449583582\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.02991312723236804,\n \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.02991312723236804\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3292533659730722,\n \"mc1_stderr\": 0.016451264440068232,\n \"mc2\": 0.4931442577289458,\n \"mc2_stderr\": 0.015404462724680936\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7916337805840569,\n \"acc_stderr\": 0.011414554399987729\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.48142532221379836,\n \"acc_stderr\": 0.013762977910317583\n 
}\n}\n```", "repo_url": "https://huggingface.co/perlthoughts/openchat-3.5-1210-32k", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|arc:challenge|25_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|gsm8k|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hellaswag|10_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-53-58.629434.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-53-58.629434.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-53-58.629434.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T11-53-58.629434.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-53-58.629434.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T11_53_58.629434", "path": ["**/details_harness|winogrande|5_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T11-53-58.629434.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T11_53_58.629434", "path": ["results_2024-01-04T11-53-58.629434.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T11-53-58.629434.parquet"]}]}]}
2024-01-04T11:56:46+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of perlthoughts/openchat-3.5-1210-32k Dataset automatically created during the evaluation run of model perlthoughts/openchat-3.5-1210-32k on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T11:53:58.629434 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
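The card text above refers to a loading snippet that did not survive the flattening of this record. A minimal sketch, mirroring the snippet used by the other evaluation cards in this dump and assuming the details repository follows the leaderboard's usual `details_<org>__<model>` naming (the repository id is not stated explicitly in this record):

```python
from datasets import load_dataset

# Load one evaluation split of the openchat-3.5-1210-32k details.
# The repository name below is inferred from the naming convention used by
# other open-llm-leaderboard details datasets; verify it before relying on it.
data = load_dataset(
    "open-llm-leaderboard/details_perlthoughts__openchat-3.5-1210-32k",
    "harness_winogrande_5",
    split="train",
)
print(data)
```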
[ "# Dataset Card for Evaluation run of perlthoughts/openchat-3.5-1210-32k\n\n\n\nDataset automatically created during the evaluation run of model perlthoughts/openchat-3.5-1210-32k on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T11:53:58.629434(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of perlthoughts/openchat-3.5-1210-32k\n\n\n\nDataset automatically created during the evaluation run of model perlthoughts/openchat-3.5-1210-32k on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T11:53:58.629434(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of perlthoughts/openchat-3.5-1210-32k\n\n\n\nDataset automatically created during the evaluation run of model perlthoughts/openchat-3.5-1210-32k on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T11:53:58.629434(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
c1e0b5a7eb694835f908bb624a54097581f69d8e
# Dataset Card for Evaluation run of nlpguy/ColorShadow-7B-v3 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [nlpguy/ColorShadow-7B-v3](https://huggingface.co/nlpguy/ColorShadow-7B-v3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_nlpguy__ColorShadow-7B-v3", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:00:00.400283](https://huggingface.co/datasets/open-llm-leaderboard/details_nlpguy__ColorShadow-7B-v3/blob/main/results_2024-01-04T12-00-00.400283.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6077756404899893, "acc_stderr": 0.0331406868415802, "acc_norm": 0.6110717172741464, "acc_norm_stderr": 0.03381281098512482, "mc1": 0.45532435740514077, "mc1_stderr": 0.017433490102538765, "mc2": 0.6287894376799671, "mc2_stderr": 0.015043355245179869 }, "harness|arc:challenge|25": { "acc": 0.6313993174061433, "acc_stderr": 0.014097810678042196, "acc_norm": 0.6757679180887372, "acc_norm_stderr": 0.013678810399518822 }, "harness|hellaswag|10": { "acc": 0.6394144592710616, "acc_stderr": 0.004791890625834189, "acc_norm": 0.8504282015534754, "acc_norm_stderr": 0.0035592230156104953 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5555555555555556, "acc_stderr": 0.04292596718256981, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.04292596718256981 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.0373852067611967, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.0373852067611967 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6867924528301886, "acc_stderr": 0.028544793319055326, "acc_norm": 0.6867924528301886, "acc_norm_stderr": 0.028544793319055326 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7222222222222222, "acc_stderr": 0.037455547914624555, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.037455547914624555 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.43, "acc_stderr": 0.04975698519562428, "acc_norm": 0.43, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.653179190751445, "acc_stderr": 0.036291466701596636, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.036291466701596636 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768079, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5404255319148936, "acc_stderr": 0.03257901482099835, "acc_norm": 0.5404255319148936, "acc_norm_stderr": 0.03257901482099835 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4298245614035088, "acc_stderr": 0.04657047260594963, "acc_norm": 0.4298245614035088, "acc_norm_stderr": 0.04657047260594963 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3968253968253968, "acc_stderr": 0.02519710107424648, "acc_norm": 0.3968253968253968, "acc_norm_stderr": 0.02519710107424648 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3888888888888889, "acc_stderr": 0.04360314860077459, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.04360314860077459 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6838709677419355, "acc_stderr": 0.026450874489042764, "acc_norm": 0.6838709677419355, "acc_norm_stderr": 0.026450874489042764 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4876847290640394, "acc_stderr": 0.035169204442208966, "acc_norm": 0.4876847290640394, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.703030303030303, "acc_stderr": 0.03567969772268049, "acc_norm": 0.703030303030303, "acc_norm_stderr": 0.03567969772268049 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7474747474747475, "acc_stderr": 0.030954055470365907, "acc_norm": 0.7474747474747475, "acc_norm_stderr": 0.030954055470365907 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8497409326424871, "acc_stderr": 0.025787723180723886, "acc_norm": 0.8497409326424871, "acc_norm_stderr": 0.025787723180723886 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6256410256410256, "acc_stderr": 0.024537591572830506, "acc_norm": 0.6256410256410256, "acc_norm_stderr": 0.024537591572830506 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3296296296296296, "acc_stderr": 0.028661201116524575, "acc_norm": 0.3296296296296296, "acc_norm_stderr": 0.028661201116524575 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6470588235294118, "acc_stderr": 0.031041941304059285, "acc_norm": 0.6470588235294118, "acc_norm_stderr": 0.031041941304059285 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 
0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8091743119266055, "acc_stderr": 0.016847676400091095, "acc_norm": 0.8091743119266055, "acc_norm_stderr": 0.016847676400091095 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.44907407407407407, "acc_stderr": 0.03392238405321616, "acc_norm": 0.44907407407407407, "acc_norm_stderr": 0.03392238405321616 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7254901960784313, "acc_stderr": 0.03132179803083289, "acc_norm": 0.7254901960784313, "acc_norm_stderr": 0.03132179803083289 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7721518987341772, "acc_stderr": 0.027303484599069422, "acc_norm": 0.7721518987341772, "acc_norm_stderr": 0.027303484599069422 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7175572519083969, "acc_stderr": 0.03948406125768361, "acc_norm": 0.7175572519083969, "acc_norm_stderr": 0.03948406125768361 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098825, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098825 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7407407407407407, "acc_stderr": 0.04236511258094633, "acc_norm": 0.7407407407407407, "acc_norm_stderr": 0.04236511258094633 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7116564417177914, "acc_stderr": 0.03559039531617342, "acc_norm": 0.7116564417177914, "acc_norm_stderr": 0.03559039531617342 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.7184466019417476, "acc_stderr": 0.044532548363264673, "acc_norm": 0.7184466019417476, "acc_norm_stderr": 0.044532548363264673 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406978, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406978 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7816091954022989, "acc_stderr": 0.014774358319934495, "acc_norm": 0.7816091954022989, "acc_norm_stderr": 0.014774358319934495 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7052023121387283, "acc_stderr": 0.024547617794803828, "acc_norm": 0.7052023121387283, "acc_norm_stderr": 0.024547617794803828 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.36089385474860336, "acc_stderr": 0.01606229067111047, "acc_norm": 0.36089385474860336, "acc_norm_stderr": 0.01606229067111047 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6633986928104575, "acc_stderr": 0.02705797462449438, "acc_norm": 0.6633986928104575, "acc_norm_stderr": 0.02705797462449438 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.684887459807074, "acc_stderr": 0.026385273703464496, "acc_norm": 0.684887459807074, "acc_norm_stderr": 0.026385273703464496 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6635802469135802, "acc_stderr": 0.02628973494595293, "acc_norm": 0.6635802469135802, "acc_norm_stderr": 0.02628973494595293 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4787234042553192, "acc_stderr": 0.029800481645628693, "acc_norm": 0.4787234042553192, "acc_norm_stderr": 0.029800481645628693 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.43089960886571055, "acc_stderr": 0.012647695889547228, "acc_norm": 0.43089960886571055, "acc_norm_stderr": 0.012647695889547228 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5661764705882353, "acc_stderr": 0.030105636570016633, "acc_norm": 0.5661764705882353, "acc_norm_stderr": 0.030105636570016633 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6405228758169934, "acc_stderr": 0.019412539242032168, "acc_norm": 0.6405228758169934, "acc_norm_stderr": 0.019412539242032168 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6938775510204082, "acc_stderr": 0.029504896454595957, "acc_norm": 0.6938775510204082, "acc_norm_stderr": 0.029504896454595957 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6766169154228856, "acc_stderr": 0.03307615947979033, "acc_norm": 0.6766169154228856, "acc_norm_stderr": 0.03307615947979033 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.82, "acc_stderr": 0.03861229196653693, "acc_norm": 0.82, "acc_norm_stderr": 0.03861229196653693 }, "harness|hendrycksTest-virology|5": { "acc": 0.5, "acc_stderr": 0.03892494720807614, "acc_norm": 0.5, "acc_norm_stderr": 0.03892494720807614 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8187134502923976, "acc_stderr": 0.02954774168764004, "acc_norm": 0.8187134502923976, "acc_norm_stderr": 0.02954774168764004 }, "harness|truthfulqa:mc|0": { "mc1": 0.45532435740514077, "mc1_stderr": 0.017433490102538765, "mc2": 0.6287894376799671, "mc2_stderr": 0.015043355245179869 }, "harness|winogrande|5": { "acc": 0.8011049723756906, "acc_stderr": 0.011218629972515302 }, "harness|gsm8k|5": { "acc": 0.47536012130401817, "acc_stderr": 0.013755751352764918 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
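Building on the loading snippet earlier in this card: the aggregated numbers shown under "Latest results" are also exposed through the "results" configuration, so they can be read programmatically instead of being copied from the JSON dump. A minimal sketch, assuming the same split layout ("latest" plus a timestamped split) as the other details records listed in this dump; the exact column layout of the results table is not documented here and should be inspected before use:

```python
from datasets import load_dataset

# The "results" configuration stores the aggregated metrics of each run;
# per the configuration listing, the "latest" split points at the most
# recent results parquet file (split names are an assumption to verify).
results = load_dataset(
    "open-llm-leaderboard/details_nlpguy__ColorShadow-7B-v3",
    "results",
    split="latest",
)
print(results)      # inspect the features first -- the schema is not documented in this card
print(results[0])   # first row: the aggregated results of the latest run
```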
open-llm-leaderboard/details_nlpguy__ColorShadow-7B-v3
[ "region:us" ]
2024-01-04T12:02:17+00:00
{"pretty_name": "Evaluation run of nlpguy/ColorShadow-7B-v3", "dataset_summary": "Dataset automatically created during the evaluation run of model [nlpguy/ColorShadow-7B-v3](https://huggingface.co/nlpguy/ColorShadow-7B-v3) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_nlpguy__ColorShadow-7B-v3\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:00:00.400283](https://huggingface.co/datasets/open-llm-leaderboard/details_nlpguy__ColorShadow-7B-v3/blob/main/results_2024-01-04T12-00-00.400283.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6077756404899893,\n \"acc_stderr\": 0.0331406868415802,\n \"acc_norm\": 0.6110717172741464,\n \"acc_norm_stderr\": 0.03381281098512482,\n \"mc1\": 0.45532435740514077,\n \"mc1_stderr\": 0.017433490102538765,\n \"mc2\": 0.6287894376799671,\n \"mc2_stderr\": 0.015043355245179869\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6313993174061433,\n \"acc_stderr\": 0.014097810678042196,\n \"acc_norm\": 0.6757679180887372,\n \"acc_norm_stderr\": 0.013678810399518822\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6394144592710616,\n \"acc_stderr\": 0.004791890625834189,\n \"acc_norm\": 0.8504282015534754,\n \"acc_norm_stderr\": 0.0035592230156104953\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.04292596718256981,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.04292596718256981\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.0373852067611967,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.0373852067611967\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.028544793319055326,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.028544793319055326\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.037455547914624555,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.037455547914624555\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 
0.04975698519562428,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.036291466701596636,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.036291466701596636\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5404255319148936,\n \"acc_stderr\": 0.03257901482099835,\n \"acc_norm\": 0.5404255319148936,\n \"acc_norm_stderr\": 0.03257901482099835\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n \"acc_stderr\": 0.04657047260594963,\n \"acc_norm\": 0.4298245614035088,\n \"acc_norm_stderr\": 0.04657047260594963\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3968253968253968,\n \"acc_stderr\": 0.02519710107424648,\n \"acc_norm\": 0.3968253968253968,\n \"acc_norm_stderr\": 0.02519710107424648\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.04360314860077459,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.04360314860077459\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6838709677419355,\n \"acc_stderr\": 0.026450874489042764,\n \"acc_norm\": 0.6838709677419355,\n \"acc_norm_stderr\": 0.026450874489042764\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4876847290640394,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.4876847290640394,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.703030303030303,\n \"acc_stderr\": 0.03567969772268049,\n \"acc_norm\": 0.703030303030303,\n \"acc_norm_stderr\": 0.03567969772268049\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7474747474747475,\n \"acc_stderr\": 0.030954055470365907,\n \"acc_norm\": 0.7474747474747475,\n \"acc_norm_stderr\": 0.030954055470365907\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8497409326424871,\n \"acc_stderr\": 0.025787723180723886,\n \"acc_norm\": 0.8497409326424871,\n \"acc_norm_stderr\": 0.025787723180723886\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": 
{\n \"acc\": 0.6256410256410256,\n \"acc_stderr\": 0.024537591572830506,\n \"acc_norm\": 0.6256410256410256,\n \"acc_norm_stderr\": 0.024537591572830506\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3296296296296296,\n \"acc_stderr\": 0.028661201116524575,\n \"acc_norm\": 0.3296296296296296,\n \"acc_norm_stderr\": 0.028661201116524575\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6470588235294118,\n \"acc_stderr\": 0.031041941304059285,\n \"acc_norm\": 0.6470588235294118,\n \"acc_norm_stderr\": 0.031041941304059285\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8091743119266055,\n \"acc_stderr\": 0.016847676400091095,\n \"acc_norm\": 0.8091743119266055,\n \"acc_norm_stderr\": 0.016847676400091095\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.44907407407407407,\n \"acc_stderr\": 0.03392238405321616,\n \"acc_norm\": 0.44907407407407407,\n \"acc_norm_stderr\": 0.03392238405321616\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7254901960784313,\n \"acc_stderr\": 0.03132179803083289,\n \"acc_norm\": 0.7254901960784313,\n \"acc_norm_stderr\": 0.03132179803083289\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7721518987341772,\n \"acc_stderr\": 0.027303484599069422,\n \"acc_norm\": 0.7721518987341772,\n \"acc_norm_stderr\": 0.027303484599069422\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7175572519083969,\n \"acc_stderr\": 0.03948406125768361,\n \"acc_norm\": 0.7175572519083969,\n \"acc_norm_stderr\": 0.03948406125768361\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098825,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098825\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7407407407407407,\n \"acc_stderr\": 0.04236511258094633,\n \"acc_norm\": 0.7407407407407407,\n \"acc_norm_stderr\": 0.04236511258094633\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7116564417177914,\n \"acc_stderr\": 0.03559039531617342,\n \"acc_norm\": 0.7116564417177914,\n \"acc_norm_stderr\": 0.03559039531617342\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7184466019417476,\n \"acc_stderr\": 0.044532548363264673,\n \"acc_norm\": 0.7184466019417476,\n \"acc_norm_stderr\": 0.044532548363264673\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406978,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406978\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7816091954022989,\n \"acc_stderr\": 
0.014774358319934495,\n \"acc_norm\": 0.7816091954022989,\n \"acc_norm_stderr\": 0.014774358319934495\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7052023121387283,\n \"acc_stderr\": 0.024547617794803828,\n \"acc_norm\": 0.7052023121387283,\n \"acc_norm_stderr\": 0.024547617794803828\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.36089385474860336,\n \"acc_stderr\": 0.01606229067111047,\n \"acc_norm\": 0.36089385474860336,\n \"acc_norm_stderr\": 0.01606229067111047\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6633986928104575,\n \"acc_stderr\": 0.02705797462449438,\n \"acc_norm\": 0.6633986928104575,\n \"acc_norm_stderr\": 0.02705797462449438\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.684887459807074,\n \"acc_stderr\": 0.026385273703464496,\n \"acc_norm\": 0.684887459807074,\n \"acc_norm_stderr\": 0.026385273703464496\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6635802469135802,\n \"acc_stderr\": 0.02628973494595293,\n \"acc_norm\": 0.6635802469135802,\n \"acc_norm_stderr\": 0.02628973494595293\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4787234042553192,\n \"acc_stderr\": 0.029800481645628693,\n \"acc_norm\": 0.4787234042553192,\n \"acc_norm_stderr\": 0.029800481645628693\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.43089960886571055,\n \"acc_stderr\": 0.012647695889547228,\n \"acc_norm\": 0.43089960886571055,\n \"acc_norm_stderr\": 0.012647695889547228\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5661764705882353,\n \"acc_stderr\": 0.030105636570016633,\n \"acc_norm\": 0.5661764705882353,\n \"acc_norm_stderr\": 0.030105636570016633\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6405228758169934,\n \"acc_stderr\": 0.019412539242032168,\n \"acc_norm\": 0.6405228758169934,\n \"acc_norm_stderr\": 0.019412539242032168\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6938775510204082,\n \"acc_stderr\": 0.029504896454595957,\n \"acc_norm\": 0.6938775510204082,\n \"acc_norm_stderr\": 0.029504896454595957\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6766169154228856,\n \"acc_stderr\": 0.03307615947979033,\n \"acc_norm\": 0.6766169154228856,\n \"acc_norm_stderr\": 0.03307615947979033\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.82,\n \"acc_stderr\": 0.03861229196653693,\n \"acc_norm\": 0.82,\n \"acc_norm_stderr\": 0.03861229196653693\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.03892494720807614,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.03892494720807614\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8187134502923976,\n \"acc_stderr\": 0.02954774168764004,\n \"acc_norm\": 0.8187134502923976,\n \"acc_norm_stderr\": 0.02954774168764004\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.45532435740514077,\n \"mc1_stderr\": 0.017433490102538765,\n \"mc2\": 0.6287894376799671,\n \"mc2_stderr\": 0.015043355245179869\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8011049723756906,\n \"acc_stderr\": 0.011218629972515302\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.47536012130401817,\n \"acc_stderr\": 0.013755751352764918\n }\n}\n```", "repo_url": 
"https://huggingface.co/nlpguy/ColorShadow-7B-v3", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-00-00.400283.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-00-00.400283.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-00-00.400283.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-00-00.400283.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-00-00.400283.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_00_00.400283", "path": ["**/details_harness|winogrande|5_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-00-00.400283.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T12_00_00.400283", "path": ["results_2024-01-04T12-00-00.400283.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T12-00-00.400283.parquet"]}]}]}
2024-01-04T12:02:41+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of nlpguy/ColorShadow-7B-v3 Dataset automatically created during the evaluation run of model nlpguy/ColorShadow-7B-v3 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:00:00.400283 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
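The loading snippet referenced above ("To load the details from a run, you can for instance do the following:") was stripped from this flattened text; it is present in the card's metadata summary and is reproduced here as a minimal sketch:

```python
from datasets import load_dataset

# Pull the details for one task configuration of this run;
# the "train" split always points to the latest results.
data = load_dataset(
    "open-llm-leaderboard/details_nlpguy__ColorShadow-7B-v3",
    "harness_winogrande_5",
    split="train",
)
```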
[ "# Dataset Card for Evaluation run of nlpguy/ColorShadow-7B-v3\n\n\n\nDataset automatically created during the evaluation run of model nlpguy/ColorShadow-7B-v3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:00:00.400283(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of nlpguy/ColorShadow-7B-v3\n\n\n\nDataset automatically created during the evaluation run of model nlpguy/ColorShadow-7B-v3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:00:00.400283(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of nlpguy/ColorShadow-7B-v3\n\n\n\nDataset automatically created during the evaluation run of model nlpguy/ColorShadow-7B-v3 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:00:00.400283(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
ff0b66c5a7f1ef132372cf87540584482e328098
# Dataset Card for Evaluation run of cognitivecomputations/dolphin-2.2.1-mistral-7b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [cognitivecomputations/dolphin-2.2.1-mistral-7b](https://huggingface.co/cognitivecomputations/dolphin-2.2.1-mistral-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_cognitivecomputations__dolphin-2.2.1-mistral-7b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:00:28.671767](https://huggingface.co/datasets/open-llm-leaderboard/details_cognitivecomputations__dolphin-2.2.1-mistral-7b/blob/main/results_2024-01-04T12-00-28.671767.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6314773728091061, "acc_stderr": 0.032247020008011884, "acc_norm": 0.6351477685860653, "acc_norm_stderr": 0.03288962606766026, "mc1": 0.36474908200734396, "mc1_stderr": 0.016850961061720113, "mc2": 0.5314305414143765, "mc2_stderr": 0.015039173098592665 }, "harness|arc:challenge|25": { "acc": 0.60580204778157, "acc_stderr": 0.014280522667467323, "acc_norm": 0.6322525597269625, "acc_norm_stderr": 0.01409099561816848 }, "harness|hellaswag|10": { "acc": 0.6432981477793268, "acc_stderr": 0.004780467270911771, "acc_norm": 0.8379804819757021, "acc_norm_stderr": 0.00367715668784884 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.04218506215368879, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.04218506215368879 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6710526315789473, "acc_stderr": 0.03823428969926604, "acc_norm": 0.6710526315789473, "acc_norm_stderr": 0.03823428969926604 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6792452830188679, "acc_stderr": 0.028727502957880263, "acc_norm": 0.6792452830188679, "acc_norm_stderr": 0.028727502957880263 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.75, "acc_stderr": 0.03621034121889507, "acc_norm": 0.75, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 0.45, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.49, "acc_stderr": 
0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6358381502890174, "acc_stderr": 0.03669072477416907, "acc_norm": 0.6358381502890174, "acc_norm_stderr": 0.03669072477416907 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3627450980392157, "acc_stderr": 0.04784060704105653, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.04784060704105653 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.79, "acc_stderr": 0.04093601807403326, "acc_norm": 0.79, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5446808510638298, "acc_stderr": 0.03255525359340354, "acc_norm": 0.5446808510638298, "acc_norm_stderr": 0.03255525359340354 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.45614035087719296, "acc_stderr": 0.04685473041907789, "acc_norm": 0.45614035087719296, "acc_norm_stderr": 0.04685473041907789 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3835978835978836, "acc_stderr": 0.025043757318520196, "acc_norm": 0.3835978835978836, "acc_norm_stderr": 0.025043757318520196 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.04390259265377561, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.04390259265377561 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.02366421667164251, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.02366421667164251 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.47783251231527096, "acc_stderr": 0.03514528562175007, "acc_norm": 0.47783251231527096, "acc_norm_stderr": 0.03514528562175007 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.02937661648494562, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.02937661648494562 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8652849740932642, "acc_stderr": 0.024639789097709443, "acc_norm": 0.8652849740932642, "acc_norm_stderr": 0.024639789097709443 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6410256410256411, "acc_stderr": 0.024321738484602354, "acc_norm": 0.6410256410256411, "acc_norm_stderr": 0.024321738484602354 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.02831753349606649, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.02831753349606649 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.680672268907563, "acc_stderr": 0.0302839955258844, "acc_norm": 0.680672268907563, "acc_norm_stderr": 0.0302839955258844 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.304635761589404, "acc_stderr": 0.037579499229433426, "acc_norm": 0.304635761589404, "acc_norm_stderr": 0.037579499229433426 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8366972477064221, "acc_stderr": 0.01584825580650155, "acc_norm": 0.8366972477064221, "acc_norm_stderr": 0.01584825580650155 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.48148148148148145, "acc_stderr": 0.034076320938540516, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.034076320938540516 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7794117647058824, "acc_stderr": 0.02910225438967407, "acc_norm": 0.7794117647058824, "acc_norm_stderr": 0.02910225438967407 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7805907172995781, "acc_stderr": 0.026939106581553945, "acc_norm": 0.7805907172995781, "acc_norm_stderr": 0.026939106581553945 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575499, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098823, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098823 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.03826076324884866, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.03826076324884866 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.033519538795212696, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.033519538795212696 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.49107142857142855, "acc_stderr": 0.04745033255489123, "acc_norm": 0.49107142857142855, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8632478632478633, "acc_stderr": 0.022509033937077805, "acc_norm": 0.8632478632478633, "acc_norm_stderr": 0.022509033937077805 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8109833971902938, "acc_stderr": 0.014000791294406999, "acc_norm": 0.8109833971902938, "acc_norm_stderr": 0.014000791294406999 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7196531791907514, "acc_stderr": 0.024182427496577612, "acc_norm": 0.7196531791907514, "acc_norm_stderr": 0.024182427496577612 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3865921787709497, "acc_stderr": 0.016286674879101022, "acc_norm": 0.3865921787709497, "acc_norm_stderr": 0.016286674879101022 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7124183006535948, "acc_stderr": 0.02591780611714716, "acc_norm": 0.7124183006535948, "acc_norm_stderr": 0.02591780611714716 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7202572347266881, "acc_stderr": 0.025494259350694912, "acc_norm": 0.7202572347266881, "acc_norm_stderr": 0.025494259350694912 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7037037037037037, "acc_stderr": 0.025407197798890162, 
"acc_norm": 0.7037037037037037, "acc_norm_stderr": 0.025407197798890162 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4787234042553192, "acc_stderr": 0.029800481645628693, "acc_norm": 0.4787234042553192, "acc_norm_stderr": 0.029800481645628693 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4517601043024772, "acc_stderr": 0.012710662233660247, "acc_norm": 0.4517601043024772, "acc_norm_stderr": 0.012710662233660247 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6470588235294118, "acc_stderr": 0.029029422815681393, "acc_norm": 0.6470588235294118, "acc_norm_stderr": 0.029029422815681393 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6535947712418301, "acc_stderr": 0.01924978569171721, "acc_norm": 0.6535947712418301, "acc_norm_stderr": 0.01924978569171721 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.726530612244898, "acc_stderr": 0.02853556033712844, "acc_norm": 0.726530612244898, "acc_norm_stderr": 0.02853556033712844 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.025870646766169136, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.025870646766169136 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.03379976689896309, "acc_norm": 0.87, "acc_norm_stderr": 0.03379976689896309 }, "harness|hendrycksTest-virology|5": { "acc": 0.5421686746987951, "acc_stderr": 0.0387862677100236, "acc_norm": 0.5421686746987951, "acc_norm_stderr": 0.0387862677100236 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.36474908200734396, "mc1_stderr": 0.016850961061720113, "mc2": 0.5314305414143765, "mc2_stderr": 0.015039173098592665 }, "harness|winogrande|5": { "acc": 0.7861089187056038, "acc_stderr": 0.011524466954090257 }, "harness|gsm8k|5": { "acc": 0.48142532221379836, "acc_stderr": 0.013762977910317584 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
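A complementary usage sketch, assuming the `datasets` library is installed and using the "results" configuration and "latest" split described above, for loading the aggregated metrics of this run:

```python
from datasets import load_dataset

# "results" is the aggregated configuration described above; the "latest"
# split always points to the most recent evaluation run of this model.
results = load_dataset(
    "open-llm-leaderboard/details_cognitivecomputations__dolphin-2.2.1-mistral-7b",
    "results",
    split="latest",
)
print(results)
```

Passing the timestamped split name (here `2024_01_04T12_00_28.671767`) instead of `"latest"` pins the query to that specific run.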
open-llm-leaderboard/details_cognitivecomputations__dolphin-2.2.1-mistral-7b
[ "region:us" ]
2024-01-04T12:02:45+00:00
{"pretty_name": "Evaluation run of cognitivecomputations/dolphin-2.2.1-mistral-7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [cognitivecomputations/dolphin-2.2.1-mistral-7b](https://huggingface.co/cognitivecomputations/dolphin-2.2.1-mistral-7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_cognitivecomputations__dolphin-2.2.1-mistral-7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:00:28.671767](https://huggingface.co/datasets/open-llm-leaderboard/details_cognitivecomputations__dolphin-2.2.1-mistral-7b/blob/main/results_2024-01-04T12-00-28.671767.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6314773728091061,\n \"acc_stderr\": 0.032247020008011884,\n \"acc_norm\": 0.6351477685860653,\n \"acc_norm_stderr\": 0.03288962606766026,\n \"mc1\": 0.36474908200734396,\n \"mc1_stderr\": 0.016850961061720113,\n \"mc2\": 0.5314305414143765,\n \"mc2_stderr\": 0.015039173098592665\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.60580204778157,\n \"acc_stderr\": 0.014280522667467323,\n \"acc_norm\": 0.6322525597269625,\n \"acc_norm_stderr\": 0.01409099561816848\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6432981477793268,\n \"acc_stderr\": 0.004780467270911771,\n \"acc_norm\": 0.8379804819757021,\n \"acc_norm_stderr\": 0.00367715668784884\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.04218506215368879,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.04218506215368879\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6710526315789473,\n \"acc_stderr\": 0.03823428969926604,\n \"acc_norm\": 0.6710526315789473,\n \"acc_norm_stderr\": 0.03823428969926604\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.028727502957880263,\n \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.028727502957880263\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03621034121889507\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6358381502890174,\n \"acc_stderr\": 0.03669072477416907,\n \"acc_norm\": 0.6358381502890174,\n \"acc_norm_stderr\": 0.03669072477416907\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105653,\n \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105653\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5446808510638298,\n \"acc_stderr\": 0.03255525359340354,\n \"acc_norm\": 0.5446808510638298,\n \"acc_norm_stderr\": 0.03255525359340354\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.45614035087719296,\n \"acc_stderr\": 0.04685473041907789,\n \"acc_norm\": 0.45614035087719296,\n \"acc_norm_stderr\": 0.04685473041907789\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3835978835978836,\n \"acc_stderr\": 0.025043757318520196,\n \"acc_norm\": 0.3835978835978836,\n \"acc_norm_stderr\": 0.025043757318520196\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n \"acc_stderr\": 0.04390259265377561,\n \"acc_norm\": 0.40476190476190477,\n \"acc_norm_stderr\": 0.04390259265377561\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.02366421667164251,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.02366421667164251\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.47783251231527096,\n \"acc_stderr\": 0.03514528562175007,\n \"acc_norm\": 0.47783251231527096,\n \"acc_norm_stderr\": 0.03514528562175007\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.02937661648494562,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.02937661648494562\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8652849740932642,\n \"acc_stderr\": 0.024639789097709443,\n \"acc_norm\": 0.8652849740932642,\n 
\"acc_norm_stderr\": 0.024639789097709443\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6410256410256411,\n \"acc_stderr\": 0.024321738484602354,\n \"acc_norm\": 0.6410256410256411,\n \"acc_norm_stderr\": 0.024321738484602354\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.02831753349606649,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.02831753349606649\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.680672268907563,\n \"acc_stderr\": 0.0302839955258844,\n \"acc_norm\": 0.680672268907563,\n \"acc_norm_stderr\": 0.0302839955258844\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.304635761589404,\n \"acc_stderr\": 0.037579499229433426,\n \"acc_norm\": 0.304635761589404,\n \"acc_norm_stderr\": 0.037579499229433426\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8366972477064221,\n \"acc_stderr\": 0.01584825580650155,\n \"acc_norm\": 0.8366972477064221,\n \"acc_norm_stderr\": 0.01584825580650155\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.034076320938540516,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.034076320938540516\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7794117647058824,\n \"acc_stderr\": 0.02910225438967407,\n \"acc_norm\": 0.7794117647058824,\n \"acc_norm_stderr\": 0.02910225438967407\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7805907172995781,\n \"acc_stderr\": 0.026939106581553945,\n \"acc_norm\": 0.7805907172995781,\n \"acc_norm_stderr\": 0.026939106581553945\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098823,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098823\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.03826076324884866,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.03826076324884866\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.033519538795212696,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.033519538795212696\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n \"acc_stderr\": 0.022509033937077805,\n \"acc_norm\": 0.8632478632478633,\n \"acc_norm_stderr\": 0.022509033937077805\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8109833971902938,\n \"acc_stderr\": 0.014000791294406999,\n \"acc_norm\": 0.8109833971902938,\n \"acc_norm_stderr\": 0.014000791294406999\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7196531791907514,\n \"acc_stderr\": 0.024182427496577612,\n \"acc_norm\": 0.7196531791907514,\n \"acc_norm_stderr\": 0.024182427496577612\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3865921787709497,\n \"acc_stderr\": 0.016286674879101022,\n \"acc_norm\": 0.3865921787709497,\n \"acc_norm_stderr\": 0.016286674879101022\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7124183006535948,\n \"acc_stderr\": 0.02591780611714716,\n \"acc_norm\": 0.7124183006535948,\n \"acc_norm_stderr\": 0.02591780611714716\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7202572347266881,\n \"acc_stderr\": 0.025494259350694912,\n \"acc_norm\": 0.7202572347266881,\n \"acc_norm_stderr\": 0.025494259350694912\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7037037037037037,\n \"acc_stderr\": 0.025407197798890162,\n \"acc_norm\": 0.7037037037037037,\n \"acc_norm_stderr\": 0.025407197798890162\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4787234042553192,\n \"acc_stderr\": 0.029800481645628693,\n \"acc_norm\": 0.4787234042553192,\n \"acc_norm_stderr\": 0.029800481645628693\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4517601043024772,\n \"acc_stderr\": 0.012710662233660247,\n \"acc_norm\": 0.4517601043024772,\n \"acc_norm_stderr\": 0.012710662233660247\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6470588235294118,\n \"acc_stderr\": 0.029029422815681393,\n \"acc_norm\": 0.6470588235294118,\n \"acc_norm_stderr\": 0.029029422815681393\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6535947712418301,\n \"acc_stderr\": 0.01924978569171721,\n \"acc_norm\": 0.6535947712418301,\n \"acc_norm_stderr\": 0.01924978569171721\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.02853556033712844,\n \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.02853556033712844\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169136,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169136\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.03379976689896309,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.03379976689896309\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.5421686746987951,\n \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.36474908200734396,\n \"mc1_stderr\": 0.016850961061720113,\n \"mc2\": 0.5314305414143765,\n \"mc2_stderr\": 0.015039173098592665\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7861089187056038,\n \"acc_stderr\": 0.011524466954090257\n },\n \"harness|gsm8k|5\": {\n \"acc\": 
0.48142532221379836,\n \"acc_stderr\": 0.013762977910317584\n }\n}\n```", "repo_url": "https://huggingface.co/cognitivecomputations/dolphin-2.2.1-mistral-7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-00-28.671767.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-00-28.671767.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-00-28.671767.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-00-28.671767.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-00-28.671767.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["**/details_harness|winogrande|5_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-04T12-00-28.671767.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T12_00_28.671767", "path": ["results_2024-01-04T12-00-28.671767.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T12-00-28.671767.parquet"]}]}]}
2024-01-04T12:03:09+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of cognitivecomputations/dolphin-2.2.1-mistral-7b Dataset automatically created during the evaluation run of model cognitivecomputations/dolphin-2.2.1-mistral-7b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:00:28.671767 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
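To enumerate the 63 configurations mentioned above programmatically, one option (a sketch, assuming the `datasets` library is available) is:

```python
from datasets import get_dataset_config_names

# List every configuration of this details dataset (one per evaluated task,
# plus the aggregated "results" configuration).
configs = get_dataset_config_names(
    "open-llm-leaderboard/details_cognitivecomputations__dolphin-2.2.1-mistral-7b"
)
print(len(configs), "configurations:", configs[:5], "...")
```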
[ "# Dataset Card for Evaluation run of cognitivecomputations/dolphin-2.2.1-mistral-7b\n\n\n\nDataset automatically created during the evaluation run of model cognitivecomputations/dolphin-2.2.1-mistral-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:00:28.671767(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of cognitivecomputations/dolphin-2.2.1-mistral-7b\n\n\n\nDataset automatically created during the evaluation run of model cognitivecomputations/dolphin-2.2.1-mistral-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:00:28.671767(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 193, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of cognitivecomputations/dolphin-2.2.1-mistral-7b\n\n\n\nDataset automatically created during the evaluation run of model cognitivecomputations/dolphin-2.2.1-mistral-7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:00:28.671767(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
8da63fd3b48035ef7dcd657d7eb4480771057f47
# Dataset Card for Evaluation run of diffnamehard/Psyfighter2-Noromaid-ties-13B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [diffnamehard/Psyfighter2-Noromaid-ties-13B](https://huggingface.co/diffnamehard/Psyfighter2-Noromaid-ties-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_diffnamehard__Psyfighter2-Noromaid-ties-13B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:06:30.176184](https://huggingface.co/datasets/open-llm-leaderboard/details_diffnamehard__Psyfighter2-Noromaid-ties-13B/blob/main/results_2024-01-04T12-06-30.176184.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5703399525737872, "acc_stderr": 0.03348629315419525, "acc_norm": 0.5759195760487561, "acc_norm_stderr": 0.03418621682597921, "mc1": 0.3574051407588739, "mc1_stderr": 0.016776599676729405, "mc2": 0.5065675245616744, "mc2_stderr": 0.015592549631793743 }, "harness|arc:challenge|25": { "acc": 0.5930034129692833, "acc_stderr": 0.014356399418009121, "acc_norm": 0.6186006825938567, "acc_norm_stderr": 0.01419438908668525 }, "harness|hellaswag|10": { "acc": 0.6562437761402111, "acc_stderr": 0.004739902411944541, "acc_norm": 0.8458474407488548, "acc_norm_stderr": 0.003603569528678413 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.04793724854411021, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411021 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4888888888888889, "acc_stderr": 0.04318275491977976, "acc_norm": 0.4888888888888889, "acc_norm_stderr": 0.04318275491977976 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5657894736842105, "acc_stderr": 0.04033565667848319, "acc_norm": 0.5657894736842105, "acc_norm_stderr": 0.04033565667848319 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6, "acc_stderr": 0.03015113445777629, "acc_norm": 0.6, "acc_norm_stderr": 0.03015113445777629 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6180555555555556, "acc_stderr": 0.040629907841466674, "acc_norm": 0.6180555555555556, "acc_norm_stderr": 0.040629907841466674 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm":
0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5433526011560693, "acc_stderr": 0.03798106566014498, "acc_norm": 0.5433526011560693, "acc_norm_stderr": 0.03798106566014498 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.040925639582376556, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.040925639582376556 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.71, "acc_stderr": 0.04560480215720685, "acc_norm": 0.71, "acc_norm_stderr": 0.04560480215720685 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4765957446808511, "acc_stderr": 0.03265019475033582, "acc_norm": 0.4765957446808511, "acc_norm_stderr": 0.03265019475033582 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.32456140350877194, "acc_stderr": 0.04404556157374768, "acc_norm": 0.32456140350877194, "acc_norm_stderr": 0.04404556157374768 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4827586206896552, "acc_stderr": 0.04164188720169377, "acc_norm": 0.4827586206896552, "acc_norm_stderr": 0.04164188720169377 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.328042328042328, "acc_stderr": 0.02418049716437691, "acc_norm": 0.328042328042328, "acc_norm_stderr": 0.02418049716437691 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.35714285714285715, "acc_stderr": 0.04285714285714281, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04285714285714281 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6548387096774193, "acc_stderr": 0.02704574657353433, "acc_norm": 0.6548387096774193, "acc_norm_stderr": 0.02704574657353433 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4433497536945813, "acc_stderr": 0.03495334582162934, "acc_norm": 0.4433497536945813, "acc_norm_stderr": 0.03495334582162934 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6787878787878788, "acc_stderr": 0.0364620496325381, "acc_norm": 0.6787878787878788, "acc_norm_stderr": 0.0364620496325381 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7222222222222222, "acc_stderr": 0.031911782267135466, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.031911782267135466 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8238341968911918, "acc_stderr": 0.02749350424454806, "acc_norm": 0.8238341968911918, "acc_norm_stderr": 0.02749350424454806 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5282051282051282, "acc_stderr": 0.025310639254933893, "acc_norm": 0.5282051282051282, "acc_norm_stderr": 0.025310639254933893 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.337037037037037, "acc_stderr": 0.028820884666253252, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.028820884666253252 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.592436974789916, "acc_stderr": 0.031918633744784645, "acc_norm": 0.592436974789916, "acc_norm_stderr": 0.031918633744784645 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.744954128440367, "acc_stderr": 0.01868850085653584, "acc_norm": 0.744954128440367, "acc_norm_stderr": 0.01868850085653584 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4212962962962963, "acc_stderr": 0.03367462138896078, "acc_norm": 0.4212962962962963, "acc_norm_stderr": 0.03367462138896078 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7794117647058824, "acc_stderr": 0.02910225438967408, "acc_norm": 0.7794117647058824, "acc_norm_stderr": 0.02910225438967408 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7848101265822784, "acc_stderr": 0.026750826994676177, "acc_norm": 0.7848101265822784, "acc_norm_stderr": 0.026750826994676177 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.672645739910314, "acc_stderr": 0.03149384670994131, "acc_norm": 0.672645739910314, "acc_norm_stderr": 0.03149384670994131 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6412213740458015, "acc_stderr": 0.04206739313864908, "acc_norm": 0.6412213740458015, "acc_norm_stderr": 0.04206739313864908 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7520661157024794, "acc_stderr": 0.03941897526516303, "acc_norm": 0.7520661157024794, "acc_norm_stderr": 0.03941897526516303 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252626, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252626 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6932515337423313, "acc_stderr": 0.03623089915724145, "acc_norm": 0.6932515337423313, "acc_norm_stderr": 0.03623089915724145 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 }, "harness|hendrycksTest-management|5": { "acc": 0.6990291262135923, "acc_stderr": 0.045416094465039476, "acc_norm": 0.6990291262135923, "acc_norm_stderr": 0.045416094465039476 }, "harness|hendrycksTest-marketing|5": { "acc": 0.811965811965812, "acc_stderr": 0.025598193686652268, "acc_norm": 0.811965811965812, "acc_norm_stderr": 0.025598193686652268 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7637292464878672, "acc_stderr": 0.015190473717037514, "acc_norm": 0.7637292464878672, "acc_norm_stderr": 0.015190473717037514 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6329479768786127, "acc_stderr": 0.02595005433765407, "acc_norm": 0.6329479768786127, "acc_norm_stderr": 0.02595005433765407 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.49273743016759775, "acc_stderr": 0.016720737405179514, "acc_norm": 0.49273743016759775, "acc_norm_stderr": 0.016720737405179514 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6339869281045751, "acc_stderr": 0.027582811415159614, "acc_norm": 0.6339869281045751, "acc_norm_stderr": 0.027582811415159614 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6463022508038585, "acc_stderr": 0.027155208103200865, "acc_norm": 0.6463022508038585, "acc_norm_stderr": 0.027155208103200865 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6419753086419753, "acc_stderr": 0.026675611926037103, "acc_norm": 0.6419753086419753, "acc_norm_stderr": 0.026675611926037103 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4219858156028369, "acc_stderr": 0.029462189233370593, "acc_norm": 0.4219858156028369, "acc_norm_stderr": 0.029462189233370593 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.438722294654498, "acc_stderr": 0.012673969883493272, "acc_norm": 0.438722294654498, "acc_norm_stderr": 0.012673969883493272 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5588235294117647, "acc_stderr": 0.03016191193076711, "acc_norm": 0.5588235294117647, "acc_norm_stderr": 0.03016191193076711 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5669934640522876, "acc_stderr": 0.020045442473324224, "acc_norm": 0.5669934640522876, "acc_norm_stderr": 0.020045442473324224 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6181818181818182, "acc_stderr": 0.046534298079135075, "acc_norm": 0.6181818181818182, "acc_norm_stderr": 0.046534298079135075 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6530612244897959, "acc_stderr": 0.0304725260267265, "acc_norm": 0.6530612244897959, "acc_norm_stderr": 0.0304725260267265 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7661691542288557, "acc_stderr": 0.029929415408348384, "acc_norm": 0.7661691542288557, "acc_norm_stderr": 0.029929415408348384 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774708, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774708 }, "harness|hendrycksTest-virology|5": { "acc": 0.4819277108433735, "acc_stderr": 0.038899512528272166, "acc_norm": 0.4819277108433735, "acc_norm_stderr": 0.038899512528272166 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.783625730994152, "acc_stderr": 0.031581495393387324, "acc_norm": 0.783625730994152, "acc_norm_stderr": 0.031581495393387324 }, "harness|truthfulqa:mc|0": { "mc1": 0.3574051407588739, "mc1_stderr": 0.016776599676729405, "mc2": 0.5065675245616744, "mc2_stderr": 0.015592549631793743 }, "harness|winogrande|5": { "acc": 0.7537490134175217, "acc_stderr": 0.012108365307437523 }, "harness|gsm8k|5": { "acc": 0.27293404094010615, "acc_stderr": 0.012270381151108758 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
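The card above already shows how to load a single task configuration. As a complementary sketch (not part of the original card), the aggregated "results" configuration mentioned in the summary can be loaded the same way; the config name "results" and the "latest" split name are taken from the configuration listing in this dataset's metadata below.

```python
from datasets import load_dataset

# Load the aggregated metrics for the most recent evaluation run.
# The "results" config and "latest" split names come from this card's config listing.
results = load_dataset(
    "open-llm-leaderboard/details_diffnamehard__Psyfighter2-Noromaid-ties-13B",
    "results",
    split="latest",
)
print(results[0])  # inspect the aggregated metrics row
```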
open-llm-leaderboard/details_diffnamehard__Psyfighter2-Noromaid-ties-13B
[ "region:us" ]
2024-01-04T12:08:50+00:00
{"pretty_name": "Evaluation run of diffnamehard/Psyfighter2-Noromaid-ties-13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [diffnamehard/Psyfighter2-Noromaid-ties-13B](https://huggingface.co/diffnamehard/Psyfighter2-Noromaid-ties-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_diffnamehard__Psyfighter2-Noromaid-ties-13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:06:30.176184](https://huggingface.co/datasets/open-llm-leaderboard/details_diffnamehard__Psyfighter2-Noromaid-ties-13B/blob/main/results_2024-01-04T12-06-30.176184.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5703399525737872,\n \"acc_stderr\": 0.03348629315419525,\n \"acc_norm\": 0.5759195760487561,\n \"acc_norm_stderr\": 0.03418621682597921,\n \"mc1\": 0.3574051407588739,\n \"mc1_stderr\": 0.016776599676729405,\n \"mc2\": 0.5065675245616744,\n \"mc2_stderr\": 0.015592549631793743\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5930034129692833,\n \"acc_stderr\": 0.014356399418009121,\n \"acc_norm\": 0.6186006825938567,\n \"acc_norm_stderr\": 0.01419438908668525\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6562437761402111,\n \"acc_stderr\": 0.004739902411944541,\n \"acc_norm\": 0.8458474407488548,\n \"acc_norm_stderr\": 0.003603569528678413\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411021,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411021\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4888888888888889,\n \"acc_stderr\": 0.04318275491977976,\n \"acc_norm\": 0.4888888888888889,\n \"acc_norm_stderr\": 0.04318275491977976\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5657894736842105,\n \"acc_stderr\": 0.04033565667848319,\n \"acc_norm\": 0.5657894736842105,\n \"acc_norm_stderr\": 0.04033565667848319\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.03015113445777629,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.03015113445777629\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6180555555555556,\n \"acc_stderr\": 0.040629907841466674,\n \"acc_norm\": 0.6180555555555556,\n \"acc_norm_stderr\": 0.040629907841466674\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5433526011560693,\n \"acc_stderr\": 0.03798106566014498,\n \"acc_norm\": 0.5433526011560693,\n \"acc_norm_stderr\": 0.03798106566014498\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.040925639582376556,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.040925639582376556\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.04560480215720685,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.04560480215720685\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4765957446808511,\n \"acc_stderr\": 0.03265019475033582,\n \"acc_norm\": 0.4765957446808511,\n \"acc_norm_stderr\": 0.03265019475033582\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.32456140350877194,\n \"acc_stderr\": 0.04404556157374768,\n \"acc_norm\": 0.32456140350877194,\n \"acc_norm_stderr\": 0.04404556157374768\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.4827586206896552,\n \"acc_stderr\": 0.04164188720169377,\n \"acc_norm\": 0.4827586206896552,\n \"acc_norm_stderr\": 0.04164188720169377\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.328042328042328,\n \"acc_stderr\": 0.02418049716437691,\n \"acc_norm\": 0.328042328042328,\n \"acc_norm_stderr\": 0.02418049716437691\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.35714285714285715,\n \"acc_stderr\": 0.04285714285714281,\n \"acc_norm\": 0.35714285714285715,\n \"acc_norm_stderr\": 0.04285714285714281\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6548387096774193,\n \"acc_stderr\": 0.02704574657353433,\n \"acc_norm\": 0.6548387096774193,\n \"acc_norm_stderr\": 0.02704574657353433\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4433497536945813,\n \"acc_stderr\": 0.03495334582162934,\n \"acc_norm\": 0.4433497536945813,\n \"acc_norm_stderr\": 0.03495334582162934\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6787878787878788,\n \"acc_stderr\": 0.0364620496325381,\n \"acc_norm\": 0.6787878787878788,\n \"acc_norm_stderr\": 0.0364620496325381\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.031911782267135466,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.031911782267135466\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8238341968911918,\n \"acc_stderr\": 0.02749350424454806,\n \"acc_norm\": 0.8238341968911918,\n \"acc_norm_stderr\": 
0.02749350424454806\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5282051282051282,\n \"acc_stderr\": 0.025310639254933893,\n \"acc_norm\": 0.5282051282051282,\n \"acc_norm_stderr\": 0.025310639254933893\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.337037037037037,\n \"acc_stderr\": 0.028820884666253252,\n \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.028820884666253252\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.592436974789916,\n \"acc_stderr\": 0.031918633744784645,\n \"acc_norm\": 0.592436974789916,\n \"acc_norm_stderr\": 0.031918633744784645\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.744954128440367,\n \"acc_stderr\": 0.01868850085653584,\n \"acc_norm\": 0.744954128440367,\n \"acc_norm_stderr\": 0.01868850085653584\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4212962962962963,\n \"acc_stderr\": 0.03367462138896078,\n \"acc_norm\": 0.4212962962962963,\n \"acc_norm_stderr\": 0.03367462138896078\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7794117647058824,\n \"acc_stderr\": 0.02910225438967408,\n \"acc_norm\": 0.7794117647058824,\n \"acc_norm_stderr\": 0.02910225438967408\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7848101265822784,\n \"acc_stderr\": 0.026750826994676177,\n \"acc_norm\": 0.7848101265822784,\n \"acc_norm_stderr\": 0.026750826994676177\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.672645739910314,\n \"acc_stderr\": 0.03149384670994131,\n \"acc_norm\": 0.672645739910314,\n \"acc_norm_stderr\": 0.03149384670994131\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6412213740458015,\n \"acc_stderr\": 0.04206739313864908,\n \"acc_norm\": 0.6412213740458015,\n \"acc_norm_stderr\": 0.04206739313864908\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7520661157024794,\n \"acc_stderr\": 0.03941897526516303,\n \"acc_norm\": 0.7520661157024794,\n \"acc_norm_stderr\": 0.03941897526516303\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252626,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252626\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6932515337423313,\n \"acc_stderr\": 0.03623089915724145,\n \"acc_norm\": 0.6932515337423313,\n \"acc_norm_stderr\": 0.03623089915724145\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.375,\n \"acc_stderr\": 0.04595091388086298,\n \"acc_norm\": 0.375,\n \"acc_norm_stderr\": 0.04595091388086298\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6990291262135923,\n \"acc_stderr\": 0.045416094465039476,\n \"acc_norm\": 0.6990291262135923,\n \"acc_norm_stderr\": 0.045416094465039476\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.811965811965812,\n \"acc_stderr\": 0.025598193686652268,\n \"acc_norm\": 0.811965811965812,\n \"acc_norm_stderr\": 0.025598193686652268\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7637292464878672,\n \"acc_stderr\": 0.015190473717037514,\n \"acc_norm\": 0.7637292464878672,\n \"acc_norm_stderr\": 0.015190473717037514\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6329479768786127,\n \"acc_stderr\": 0.02595005433765407,\n \"acc_norm\": 0.6329479768786127,\n \"acc_norm_stderr\": 0.02595005433765407\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.49273743016759775,\n \"acc_stderr\": 0.016720737405179514,\n \"acc_norm\": 0.49273743016759775,\n \"acc_norm_stderr\": 0.016720737405179514\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6339869281045751,\n \"acc_stderr\": 0.027582811415159614,\n \"acc_norm\": 0.6339869281045751,\n \"acc_norm_stderr\": 0.027582811415159614\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6463022508038585,\n \"acc_stderr\": 0.027155208103200865,\n \"acc_norm\": 0.6463022508038585,\n \"acc_norm_stderr\": 0.027155208103200865\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6419753086419753,\n \"acc_stderr\": 0.026675611926037103,\n \"acc_norm\": 0.6419753086419753,\n \"acc_norm_stderr\": 0.026675611926037103\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4219858156028369,\n \"acc_stderr\": 0.029462189233370593,\n \"acc_norm\": 0.4219858156028369,\n \"acc_norm_stderr\": 0.029462189233370593\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.438722294654498,\n \"acc_stderr\": 0.012673969883493272,\n \"acc_norm\": 0.438722294654498,\n \"acc_norm_stderr\": 0.012673969883493272\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5588235294117647,\n \"acc_stderr\": 0.03016191193076711,\n \"acc_norm\": 0.5588235294117647,\n \"acc_norm_stderr\": 0.03016191193076711\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5669934640522876,\n \"acc_stderr\": 0.020045442473324224,\n \"acc_norm\": 0.5669934640522876,\n \"acc_norm_stderr\": 0.020045442473324224\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6181818181818182,\n \"acc_stderr\": 0.046534298079135075,\n \"acc_norm\": 0.6181818181818182,\n \"acc_norm_stderr\": 0.046534298079135075\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6530612244897959,\n \"acc_stderr\": 0.0304725260267265,\n \"acc_norm\": 0.6530612244897959,\n \"acc_norm_stderr\": 0.0304725260267265\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7661691542288557,\n \"acc_stderr\": 0.029929415408348384,\n \"acc_norm\": 0.7661691542288557,\n \"acc_norm_stderr\": 0.029929415408348384\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774708,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774708\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4819277108433735,\n \"acc_stderr\": 0.038899512528272166,\n \"acc_norm\": 0.4819277108433735,\n \"acc_norm_stderr\": 0.038899512528272166\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.783625730994152,\n \"acc_stderr\": 0.031581495393387324,\n \"acc_norm\": 0.783625730994152,\n \"acc_norm_stderr\": 0.031581495393387324\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3574051407588739,\n \"mc1_stderr\": 0.016776599676729405,\n \"mc2\": 0.5065675245616744,\n \"mc2_stderr\": 0.015592549631793743\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7537490134175217,\n \"acc_stderr\": 0.012108365307437523\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.27293404094010615,\n \"acc_stderr\": 
0.012270381151108758\n }\n}\n```", "repo_url": "https://huggingface.co/diffnamehard/Psyfighter2-Noromaid-ties-13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-06-30.176184.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-06-30.176184.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-06-30.176184.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-06-30.176184.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-06-30.176184.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_06_30.176184", "path": ["**/details_harness|winogrande|5_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-06-30.176184.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T12_06_30.176184", "path": ["results_2024-01-04T12-06-30.176184.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T12-06-30.176184.parquet"]}]}]}
2024-01-04T12:09:14+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of diffnamehard/Psyfighter2-Noromaid-ties-13B Dataset automatically created during the evaluation run of model diffnamehard/Psyfighter2-Noromaid-ties-13B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:06:30.176184 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of diffnamehard/Psyfighter2-Noromaid-ties-13B\n\n\n\nDataset automatically created during the evaluation run of model diffnamehard/Psyfighter2-Noromaid-ties-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:06:30.176184(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of diffnamehard/Psyfighter2-Noromaid-ties-13B\n\n\n\nDataset automatically created during the evaluation run of model diffnamehard/Psyfighter2-Noromaid-ties-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:06:30.176184(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 191, 69, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of diffnamehard/Psyfighter2-Noromaid-ties-13B\n\n\n\nDataset automatically created during the evaluation run of model diffnamehard/Psyfighter2-Noromaid-ties-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:06:30.176184(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
9dd25cbc8176afadbea383305136d2ed036691c4
# Dataset Card for Evaluation run of rishiraj/uncensored <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [rishiraj/uncensored](https://huggingface.co/rishiraj/uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_rishiraj__uncensored", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:11:19.373726](https://huggingface.co/datasets/open-llm-leaderboard/details_rishiraj__uncensored/blob/main/results_2024-01-04T12-11-19.373726.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6138134057467441, "acc_stderr": 0.03270091323935443, "acc_norm": 0.6170723511545717, "acc_norm_stderr": 0.03335705121648071, "mc1": 0.423500611995104, "mc1_stderr": 0.017297421448534727, "mc2": 0.5914138790054457, "mc2_stderr": 0.015571835698051038 }, "harness|arc:challenge|25": { "acc": 0.6160409556313993, "acc_stderr": 0.014212444980651892, "acc_norm": 0.6604095563139932, "acc_norm_stderr": 0.01383903976282017 }, "harness|hellaswag|10": { "acc": 0.6523600876319459, "acc_stderr": 0.004752476997887822, "acc_norm": 0.8480382393945429, "acc_norm_stderr": 0.0035825015965645496 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542129, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542129 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.562962962962963, "acc_stderr": 0.042849586397534015, "acc_norm": 0.562962962962963, "acc_norm_stderr": 0.042849586397534015 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7105263157894737, "acc_stderr": 0.03690677986137283, "acc_norm": 0.7105263157894737, "acc_norm_stderr": 0.03690677986137283 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6867924528301886, "acc_stderr": 0.028544793319055326, "acc_norm": 0.6867924528301886, "acc_norm_stderr": 0.028544793319055326 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7430555555555556, "acc_stderr": 0.03653946969442099, "acc_norm": 0.7430555555555556, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, 
"acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6011560693641619, "acc_stderr": 0.037336266553835096, "acc_norm": 0.6011560693641619, "acc_norm_stderr": 0.037336266553835096 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3431372549019608, "acc_stderr": 0.04724007352383888, "acc_norm": 0.3431372549019608, "acc_norm_stderr": 0.04724007352383888 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909283, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5361702127659574, "acc_stderr": 0.032600385118357715, "acc_norm": 0.5361702127659574, "acc_norm_stderr": 0.032600385118357715 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.40350877192982454, "acc_stderr": 0.04615186962583703, "acc_norm": 0.40350877192982454, "acc_norm_stderr": 0.04615186962583703 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5862068965517241, "acc_stderr": 0.04104269211806232, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.04104269211806232 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3888888888888889, "acc_stderr": 0.02510742548113728, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.02510742548113728 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.04725815626252606, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252606 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6774193548387096, "acc_stderr": 0.02659308451657228, "acc_norm": 0.6774193548387096, "acc_norm_stderr": 0.02659308451657228 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.45320197044334976, "acc_stderr": 0.035025446508458714, "acc_norm": 0.45320197044334976, "acc_norm_stderr": 0.035025446508458714 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.64, "acc_stderr": 0.048241815132442176, "acc_norm": 0.64, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7727272727272727, "acc_stderr": 0.029857515673386424, "acc_norm": 0.7727272727272727, "acc_norm_stderr": 0.029857515673386424 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8497409326424871, "acc_stderr": 0.02578772318072388, "acc_norm": 0.8497409326424871, "acc_norm_stderr": 0.02578772318072388 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6076923076923076, "acc_stderr": 0.024756000382130952, "acc_norm": 0.6076923076923076, "acc_norm_stderr": 0.024756000382130952 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.28888888888888886, "acc_stderr": 0.027634907264178544, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.027634907264178544 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6554621848739496, "acc_stderr": 0.03086868260412162, "acc_norm": 0.6554621848739496, "acc_norm_stderr": 0.03086868260412162 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8055045871559633, "acc_stderr": 0.016970289090458033, "acc_norm": 0.8055045871559633, "acc_norm_stderr": 0.016970289090458033 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4675925925925926, "acc_stderr": 0.03402801581358966, "acc_norm": 0.4675925925925926, "acc_norm_stderr": 0.03402801581358966 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7941176470588235, "acc_stderr": 0.028379449451588663, "acc_norm": 0.7941176470588235, "acc_norm_stderr": 0.028379449451588663 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7637130801687764, "acc_stderr": 0.02765215314415925, "acc_norm": 0.7637130801687764, "acc_norm_stderr": 0.02765215314415925 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.03641297081313729, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.03641297081313729 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8264462809917356, "acc_stderr": 0.03457272836917669, "acc_norm": 0.8264462809917356, "acc_norm_stderr": 0.03457272836917669 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252627, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7300613496932515, "acc_stderr": 0.03487825168497892, "acc_norm": 0.7300613496932515, "acc_norm_stderr": 0.03487825168497892 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5178571428571429, "acc_stderr": 0.04742762361243011, "acc_norm": 0.5178571428571429, "acc_norm_stderr": 0.04742762361243011 }, "harness|hendrycksTest-management|5": { "acc": 0.7475728155339806, "acc_stderr": 0.04301250399690878, "acc_norm": 0.7475728155339806, "acc_norm_stderr": 0.04301250399690878 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.022801382534597542, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.022801382534597542 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.68, "acc_stderr": 0.046882617226215034, "acc_norm": 0.68, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7918263090676884, "acc_stderr": 0.014518592248904033, "acc_norm": 0.7918263090676884, "acc_norm_stderr": 0.014518592248904033 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7023121387283237, "acc_stderr": 0.024617055388677003, "acc_norm": 0.7023121387283237, "acc_norm_stderr": 0.024617055388677003 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3664804469273743, "acc_stderr": 0.016115235504865467, "acc_norm": 0.3664804469273743, "acc_norm_stderr": 0.016115235504865467 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.696078431372549, "acc_stderr": 0.026336613469046626, "acc_norm": 0.696078431372549, "acc_norm_stderr": 0.026336613469046626 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6720257234726688, "acc_stderr": 0.026664410886937613, "acc_norm": 0.6720257234726688, "acc_norm_stderr": 0.026664410886937613 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6944444444444444, "acc_stderr": 0.025630824975621348, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.025630824975621348 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.46099290780141844, "acc_stderr": 0.02973659252642444, 
"acc_norm": 0.46099290780141844, "acc_norm_stderr": 0.02973659252642444 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4511082138200782, "acc_stderr": 0.012709037347346233, "acc_norm": 0.4511082138200782, "acc_norm_stderr": 0.012709037347346233 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6213235294117647, "acc_stderr": 0.02946513363977613, "acc_norm": 0.6213235294117647, "acc_norm_stderr": 0.02946513363977613 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6421568627450981, "acc_stderr": 0.019393058402355442, "acc_norm": 0.6421568627450981, "acc_norm_stderr": 0.019393058402355442 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.04582004841505417, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.04582004841505417 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6119402985074627, "acc_stderr": 0.034457899643627506, "acc_norm": 0.6119402985074627, "acc_norm_stderr": 0.034457899643627506 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.035887028128263686, "acc_norm": 0.85, "acc_norm_stderr": 0.035887028128263686 }, "harness|hendrycksTest-virology|5": { "acc": 0.5240963855421686, "acc_stderr": 0.03887971849597264, "acc_norm": 0.5240963855421686, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8421052631578947, "acc_stderr": 0.027966785859160875, "acc_norm": 0.8421052631578947, "acc_norm_stderr": 0.027966785859160875 }, "harness|truthfulqa:mc|0": { "mc1": 0.423500611995104, "mc1_stderr": 0.017297421448534727, "mc2": 0.5914138790054457, "mc2_stderr": 0.015571835698051038 }, "harness|winogrande|5": { "acc": 0.7932123125493291, "acc_stderr": 0.011382566829235798 }, "harness|gsm8k|5": { "acc": 0.48218347232752085, "acc_stderr": 0.013763738379867923 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
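The snippet in the card above loads a single per-task configuration; the aggregated metrics live in the separate "results" configuration the card describes. A minimal sketch follows, assuming that this record's "results" configuration exposes a "latest" split like the per-task configurations in the other records of this dump do (the config listing for this record is truncated here, so treat the split name as an assumption).

```python
from datasets import load_dataset

# Load the aggregated metrics for the most recent evaluation run of rishiraj/uncensored.
results = load_dataset(
    "open-llm-leaderboard/details_rishiraj__uncensored",
    "results",
    split="latest",  # assumed to mirror the latest results_*.parquet, as in the other records
)
print(results[0])  # inspect the aggregated metrics dictionary
```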
open-llm-leaderboard/details_rishiraj__uncensored
[ "region:us" ]
2024-01-04T12:13:35+00:00
{"pretty_name": "Evaluation run of rishiraj/uncensored", "dataset_summary": "Dataset automatically created during the evaluation run of model [rishiraj/uncensored](https://huggingface.co/rishiraj/uncensored) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_rishiraj__uncensored\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:11:19.373726](https://huggingface.co/datasets/open-llm-leaderboard/details_rishiraj__uncensored/blob/main/results_2024-01-04T12-11-19.373726.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6138134057467441,\n \"acc_stderr\": 0.03270091323935443,\n \"acc_norm\": 0.6170723511545717,\n \"acc_norm_stderr\": 0.03335705121648071,\n \"mc1\": 0.423500611995104,\n \"mc1_stderr\": 0.017297421448534727,\n \"mc2\": 0.5914138790054457,\n \"mc2_stderr\": 0.015571835698051038\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6160409556313993,\n \"acc_stderr\": 0.014212444980651892,\n \"acc_norm\": 0.6604095563139932,\n \"acc_norm_stderr\": 0.01383903976282017\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6523600876319459,\n \"acc_stderr\": 0.004752476997887822,\n \"acc_norm\": 0.8480382393945429,\n \"acc_norm_stderr\": 0.0035825015965645496\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542129,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542129\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.562962962962963,\n \"acc_stderr\": 0.042849586397534015,\n \"acc_norm\": 0.562962962962963,\n \"acc_norm_stderr\": 0.042849586397534015\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7105263157894737,\n \"acc_stderr\": 0.03690677986137283,\n \"acc_norm\": 0.7105263157894737,\n \"acc_norm_stderr\": 0.03690677986137283\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.028544793319055326,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.028544793319055326\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7430555555555556,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.7430555555555556,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n 
\"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6011560693641619,\n \"acc_stderr\": 0.037336266553835096,\n \"acc_norm\": 0.6011560693641619,\n \"acc_norm_stderr\": 0.037336266553835096\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3431372549019608,\n \"acc_stderr\": 0.04724007352383888,\n \"acc_norm\": 0.3431372549019608,\n \"acc_norm_stderr\": 0.04724007352383888\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5361702127659574,\n \"acc_stderr\": 0.032600385118357715,\n \"acc_norm\": 0.5361702127659574,\n \"acc_norm_stderr\": 0.032600385118357715\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.40350877192982454,\n \"acc_stderr\": 0.04615186962583703,\n \"acc_norm\": 0.40350877192982454,\n \"acc_norm_stderr\": 0.04615186962583703\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232,\n \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.02510742548113728,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.02510742548113728\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252606,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252606\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6774193548387096,\n \"acc_stderr\": 0.02659308451657228,\n \"acc_norm\": 0.6774193548387096,\n \"acc_norm_stderr\": 0.02659308451657228\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.45320197044334976,\n \"acc_stderr\": 0.035025446508458714,\n \"acc_norm\": 0.45320197044334976,\n \"acc_norm_stderr\": 0.035025446508458714\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7727272727272727,\n \"acc_stderr\": 0.029857515673386424,\n \"acc_norm\": 0.7727272727272727,\n \"acc_norm_stderr\": 0.029857515673386424\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8497409326424871,\n \"acc_stderr\": 0.02578772318072388,\n \"acc_norm\": 0.8497409326424871,\n \"acc_norm_stderr\": 0.02578772318072388\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6076923076923076,\n \"acc_stderr\": 0.024756000382130952,\n 
\"acc_norm\": 0.6076923076923076,\n \"acc_norm_stderr\": 0.024756000382130952\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.28888888888888886,\n \"acc_stderr\": 0.027634907264178544,\n \"acc_norm\": 0.28888888888888886,\n \"acc_norm_stderr\": 0.027634907264178544\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6554621848739496,\n \"acc_stderr\": 0.03086868260412162,\n \"acc_norm\": 0.6554621848739496,\n \"acc_norm_stderr\": 0.03086868260412162\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8055045871559633,\n \"acc_stderr\": 0.016970289090458033,\n \"acc_norm\": 0.8055045871559633,\n \"acc_norm_stderr\": 0.016970289090458033\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4675925925925926,\n \"acc_stderr\": 0.03402801581358966,\n \"acc_norm\": 0.4675925925925926,\n \"acc_norm_stderr\": 0.03402801581358966\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7941176470588235,\n \"acc_stderr\": 0.028379449451588663,\n \"acc_norm\": 0.7941176470588235,\n \"acc_norm_stderr\": 0.028379449451588663\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7637130801687764,\n \"acc_stderr\": 0.02765215314415925,\n \"acc_norm\": 0.7637130801687764,\n \"acc_norm_stderr\": 0.02765215314415925\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.03641297081313729,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.03641297081313729\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8264462809917356,\n \"acc_stderr\": 0.03457272836917669,\n \"acc_norm\": 0.8264462809917356,\n \"acc_norm_stderr\": 0.03457272836917669\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7300613496932515,\n \"acc_stderr\": 0.03487825168497892,\n \"acc_norm\": 0.7300613496932515,\n \"acc_norm_stderr\": 0.03487825168497892\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5178571428571429,\n \"acc_stderr\": 0.04742762361243011,\n \"acc_norm\": 0.5178571428571429,\n \"acc_norm_stderr\": 0.04742762361243011\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7475728155339806,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.7475728155339806,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597542,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597542\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7918263090676884,\n \"acc_stderr\": 0.014518592248904033,\n \"acc_norm\": 0.7918263090676884,\n \"acc_norm_stderr\": 
0.014518592248904033\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7023121387283237,\n \"acc_stderr\": 0.024617055388677003,\n \"acc_norm\": 0.7023121387283237,\n \"acc_norm_stderr\": 0.024617055388677003\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3664804469273743,\n \"acc_stderr\": 0.016115235504865467,\n \"acc_norm\": 0.3664804469273743,\n \"acc_norm_stderr\": 0.016115235504865467\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.696078431372549,\n \"acc_stderr\": 0.026336613469046626,\n \"acc_norm\": 0.696078431372549,\n \"acc_norm_stderr\": 0.026336613469046626\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6720257234726688,\n \"acc_stderr\": 0.026664410886937613,\n \"acc_norm\": 0.6720257234726688,\n \"acc_norm_stderr\": 0.026664410886937613\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.025630824975621348,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.025630824975621348\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46099290780141844,\n \"acc_stderr\": 0.02973659252642444,\n \"acc_norm\": 0.46099290780141844,\n \"acc_norm_stderr\": 0.02973659252642444\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4511082138200782,\n \"acc_stderr\": 0.012709037347346233,\n \"acc_norm\": 0.4511082138200782,\n \"acc_norm_stderr\": 0.012709037347346233\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6213235294117647,\n \"acc_stderr\": 0.02946513363977613,\n \"acc_norm\": 0.6213235294117647,\n \"acc_norm_stderr\": 0.02946513363977613\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6421568627450981,\n \"acc_stderr\": 0.019393058402355442,\n \"acc_norm\": 0.6421568627450981,\n \"acc_norm_stderr\": 0.019393058402355442\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.04582004841505417,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.04582004841505417\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6119402985074627,\n \"acc_stderr\": 0.034457899643627506,\n \"acc_norm\": 0.6119402985074627,\n \"acc_norm_stderr\": 0.034457899643627506\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.035887028128263686,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.035887028128263686\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5240963855421686,\n \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.5240963855421686,\n \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.027966785859160875,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.027966785859160875\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.423500611995104,\n \"mc1_stderr\": 0.017297421448534727,\n \"mc2\": 0.5914138790054457,\n \"mc2_stderr\": 0.015571835698051038\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7932123125493291,\n \"acc_stderr\": 0.011382566829235798\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.48218347232752085,\n \"acc_stderr\": 0.013763738379867923\n }\n}\n```", "repo_url": "https://huggingface.co/rishiraj/uncensored", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-11-19.373726.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-11-19.373726.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-11-19.373726.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-11-19.373726.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-11-19.373726.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-11-19.373726.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["**/details_harness|winogrande|5_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-11-19.373726.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T12_11_19.373726", "path": ["results_2024-01-04T12-11-19.373726.parquet"]}, {"split": "latest", "path": 
["results_2024-01-04T12-11-19.373726.parquet"]}]}]}
2024-01-04T12:13:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of rishiraj/uncensored Dataset automatically created during the evaluation run of model rishiraj/uncensored on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:11:19.373726 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
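The loading snippet referenced above ("you can for instance do the following") was stripped from this processed text. A minimal sketch is given below; the repository id `open-llm-leaderboard/details_rishiraj__uncensored` is an assumption based on the leaderboard's usual `details_<org>__<model>` naming, while the config and split names are taken from the config metadata listed above.

```python
# Minimal sketch, not the card's original snippet.
# Assumed repo id: open-llm-leaderboard/details_rishiraj__uncensored
# (the leaderboard's usual details_<org>__<model> convention).
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_rishiraj__uncensored",
    "harness_winogrande_5",  # any config listed in the metadata above works
    split="latest",          # or the timestamped split "2024_01_04T12_11_19.373726"
)
print(data)
```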
[ "# Dataset Card for Evaluation run of rishiraj/uncensored\n\n\n\nDataset automatically created during the evaluation run of model rishiraj/uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:11:19.373726(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of rishiraj/uncensored\n\n\n\nDataset automatically created during the evaluation run of model rishiraj/uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:11:19.373726(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 175, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of rishiraj/uncensored\n\n\n\nDataset automatically created during the evaluation run of model rishiraj/uncensored on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:11:19.373726(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
bbcfd99a3a0954d8732819231349746f191392b8
# Dataset Card for Evaluation run of TomGrc/FusionNet <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [TomGrc/FusionNet](https://huggingface.co/TomGrc/FusionNet) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TomGrc__FusionNet", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:12:49.231518](https://huggingface.co/datasets/open-llm-leaderboard/details_TomGrc__FusionNet/blob/main/results_2024-01-04T12-12-49.231518.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6672981908741114, "acc_stderr": 0.031616068911940555, "acc_norm": 0.6681680299548688, "acc_norm_stderr": 0.032258823353895884, "mc1": 0.5740514075887393, "mc1_stderr": 0.01731047190407654, "mc2": 0.7195314778980147, "mc2_stderr": 0.015001196424578202 }, "harness|arc:challenge|25": { "acc": 0.6834470989761092, "acc_stderr": 0.013592431519068079, "acc_norm": 0.712457337883959, "acc_norm_stderr": 0.013226719056266125 }, "harness|hellaswag|10": { "acc": 0.7133041226847242, "acc_stderr": 0.004512940497462742, "acc_norm": 0.8841864170483967, "acc_norm_stderr": 0.0031934725302821725 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.44, "acc_stderr": 0.0498887651569859, "acc_norm": 0.44, "acc_norm_stderr": 0.0498887651569859 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.756578947368421, "acc_stderr": 0.034923496688842384, "acc_norm": 0.756578947368421, "acc_norm_stderr": 0.034923496688842384 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.74, "acc_stderr": 0.0440844002276808, "acc_norm": 0.74, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.02863723563980089, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.02863723563980089 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956913, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956913 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 
0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768077, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768077 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.625531914893617, "acc_stderr": 0.03163910665367291, "acc_norm": 0.625531914893617, "acc_norm_stderr": 0.03163910665367291 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6344827586206897, "acc_stderr": 0.040131241954243856, "acc_norm": 0.6344827586206897, "acc_norm_stderr": 0.040131241954243856 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4973544973544973, "acc_stderr": 0.02575094967813039, "acc_norm": 0.4973544973544973, "acc_norm_stderr": 0.02575094967813039 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8193548387096774, "acc_stderr": 0.021886178567172534, "acc_norm": 0.8193548387096774, "acc_norm_stderr": 0.021886178567172534 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.03517945038691063, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8121212121212121, "acc_stderr": 0.03050193405942914, "acc_norm": 0.8121212121212121, "acc_norm_stderr": 0.03050193405942914 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8686868686868687, "acc_stderr": 0.024063156416822516, "acc_norm": 0.8686868686868687, "acc_norm_stderr": 0.024063156416822516 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603348, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603348 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563976, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563976 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37037037037037035, "acc_stderr": 0.02944316932303154, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.02944316932303154 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7142857142857143, "acc_stderr": 0.029344572500634332, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.029344572500634332 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.37748344370860926, "acc_stderr": 0.03958027231121569, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 0.03958027231121569 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8458715596330275, "acc_stderr": 0.015480826865374308, "acc_norm": 0.8458715596330275, "acc_norm_stderr": 0.015480826865374308 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5740740740740741, "acc_stderr": 0.03372343271653062, "acc_norm": 0.5740740740740741, "acc_norm_stderr": 0.03372343271653062 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8578431372549019, "acc_stderr": 0.02450980392156862, "acc_norm": 0.8578431372549019, "acc_norm_stderr": 0.02450980392156862 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8481012658227848, "acc_stderr": 0.023363878096632446, "acc_norm": 0.8481012658227848, "acc_norm_stderr": 0.023363878096632446 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575499, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.03727673575596915, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.03727673575596915 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228733, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.03492606476623791, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.03492606476623791 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.0230866350868414, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.0230866350868414 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8045977011494253, "acc_stderr": 0.014179171373424383, "acc_norm": 0.8045977011494253, "acc_norm_stderr": 0.014179171373424383 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7543352601156069, "acc_stderr": 0.023176298203992005, "acc_norm": 0.7543352601156069, "acc_norm_stderr": 0.023176298203992005 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.39217877094972065, "acc_stderr": 0.016329061073207446, "acc_norm": 0.39217877094972065, "acc_norm_stderr": 0.016329061073207446 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7549019607843137, "acc_stderr": 0.02463004897982478, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.02463004897982478 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.729903536977492, "acc_stderr": 0.02521804037341062, "acc_norm": 0.729903536977492, "acc_norm_stderr": 0.02521804037341062 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7839506172839507, "acc_stderr": 0.022899162918445806, "acc_norm": 0.7839506172839507, "acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4929078014184397, "acc_stderr": 0.02982449855912901, "acc_norm": 
0.4929078014184397, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4934810951760104, "acc_stderr": 0.012769150688867503, "acc_norm": 0.4934810951760104, "acc_norm_stderr": 0.012769150688867503 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7389705882352942, "acc_stderr": 0.026679252270103128, "acc_norm": 0.7389705882352942, "acc_norm_stderr": 0.026679252270103128 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6764705882352942, "acc_stderr": 0.018926082916083383, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.018926082916083383 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.028123429335142783, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.028123429335142783 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.91, "acc_stderr": 0.028762349126466125, "acc_norm": 0.91, "acc_norm_stderr": 0.028762349126466125 }, "harness|hendrycksTest-virology|5": { "acc": 0.5843373493975904, "acc_stderr": 0.03836722176598053, "acc_norm": 0.5843373493975904, "acc_norm_stderr": 0.03836722176598053 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03188578017686398, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03188578017686398 }, "harness|truthfulqa:mc|0": { "mc1": 0.5740514075887393, "mc1_stderr": 0.01731047190407654, "mc2": 0.7195314778980147, "mc2_stderr": 0.015001196424578202 }, "harness|winogrande|5": { "acc": 0.8326756116811366, "acc_stderr": 0.010490608806828075 }, "harness|gsm8k|5": { "acc": 0.6504927975739196, "acc_stderr": 0.013133836511705991 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
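As a usage note, the aggregated numbers shown under "Latest results" can also be read programmatically from the "results" configuration described in the card. A minimal sketch follows, assuming this repo's "results" config exposes a "latest" split in the same way as the config metadata of the other evaluation datasets in this dump:

```python
# Minimal sketch: pull the aggregated metrics for the latest run.
# Assumes the "results" config has a "latest" split, mirroring the
# config layout shown for the other details_* repos in this dump.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_TomGrc__FusionNet",
    "results",
    split="latest",  # or the timestamped split "2024_01_04T12_12_49.231518"
)
print(results[0])    # first (typically only) row, holding the aggregated results of the run
```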
open-llm-leaderboard/details_TomGrc__FusionNet
[ "region:us" ]
2024-01-04T12:15:05+00:00
{"pretty_name": "Evaluation run of TomGrc/FusionNet", "dataset_summary": "Dataset automatically created during the evaluation run of model [TomGrc/FusionNet](https://huggingface.co/TomGrc/FusionNet) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TomGrc__FusionNet\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:12:49.231518](https://huggingface.co/datasets/open-llm-leaderboard/details_TomGrc__FusionNet/blob/main/results_2024-01-04T12-12-49.231518.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6672981908741114,\n \"acc_stderr\": 0.031616068911940555,\n \"acc_norm\": 0.6681680299548688,\n \"acc_norm_stderr\": 0.032258823353895884,\n \"mc1\": 0.5740514075887393,\n \"mc1_stderr\": 0.01731047190407654,\n \"mc2\": 0.7195314778980147,\n \"mc2_stderr\": 0.015001196424578202\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6834470989761092,\n \"acc_stderr\": 0.013592431519068079,\n \"acc_norm\": 0.712457337883959,\n \"acc_norm_stderr\": 0.013226719056266125\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7133041226847242,\n \"acc_stderr\": 0.004512940497462742,\n \"acc_norm\": 0.8841864170483967,\n \"acc_norm_stderr\": 0.0031934725302821725\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.0498887651569859,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.0498887651569859\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.756578947368421,\n \"acc_stderr\": 0.034923496688842384,\n \"acc_norm\": 0.756578947368421,\n \"acc_norm_stderr\": 0.034923496688842384\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.02863723563980089,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.02863723563980089\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n 
\"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956913,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956913\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768077,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768077\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.625531914893617,\n \"acc_stderr\": 0.03163910665367291,\n \"acc_norm\": 0.625531914893617,\n \"acc_norm_stderr\": 0.03163910665367291\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6344827586206897,\n \"acc_stderr\": 0.040131241954243856,\n \"acc_norm\": 0.6344827586206897,\n \"acc_norm_stderr\": 0.040131241954243856\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4973544973544973,\n \"acc_stderr\": 0.02575094967813039,\n \"acc_norm\": 0.4973544973544973,\n \"acc_norm_stderr\": 0.02575094967813039\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8193548387096774,\n \"acc_stderr\": 0.021886178567172534,\n \"acc_norm\": 0.8193548387096774,\n \"acc_norm_stderr\": 0.021886178567172534\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8121212121212121,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.8121212121212121,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8686868686868687,\n \"acc_stderr\": 0.024063156416822516,\n \"acc_norm\": 0.8686868686868687,\n \"acc_norm_stderr\": 0.024063156416822516\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603348,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603348\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6641025641025641,\n \"acc_stderr\": 
0.023946724741563976,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563976\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37037037037037035,\n \"acc_stderr\": 0.02944316932303154,\n \"acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.02944316932303154\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.029344572500634332,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.029344572500634332\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.03958027231121569,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.03958027231121569\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374308,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374308\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5740740740740741,\n \"acc_stderr\": 0.03372343271653062,\n \"acc_norm\": 0.5740740740740741,\n \"acc_norm_stderr\": 0.03372343271653062\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8578431372549019,\n \"acc_stderr\": 0.02450980392156862,\n \"acc_norm\": 0.8578431372549019,\n \"acc_norm_stderr\": 0.02450980392156862\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8481012658227848,\n \"acc_stderr\": 0.023363878096632446,\n \"acc_norm\": 0.8481012658227848,\n \"acc_norm_stderr\": 0.023363878096632446\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596915,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596915\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.03492606476623791,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.03492606476623791\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.0230866350868414,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.0230866350868414\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8045977011494253,\n \"acc_stderr\": 0.014179171373424383,\n \"acc_norm\": 0.8045977011494253,\n 
\"acc_norm_stderr\": 0.014179171373424383\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7543352601156069,\n \"acc_stderr\": 0.023176298203992005,\n \"acc_norm\": 0.7543352601156069,\n \"acc_norm_stderr\": 0.023176298203992005\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.39217877094972065,\n \"acc_stderr\": 0.016329061073207446,\n \"acc_norm\": 0.39217877094972065,\n \"acc_norm_stderr\": 0.016329061073207446\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.02463004897982478,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.02463004897982478\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.729903536977492,\n \"acc_stderr\": 0.02521804037341062,\n \"acc_norm\": 0.729903536977492,\n \"acc_norm_stderr\": 0.02521804037341062\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7839506172839507,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.7839506172839507,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4934810951760104,\n \"acc_stderr\": 0.012769150688867503,\n \"acc_norm\": 0.4934810951760104,\n \"acc_norm_stderr\": 0.012769150688867503\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7389705882352942,\n \"acc_stderr\": 0.026679252270103128,\n \"acc_norm\": 0.7389705882352942,\n \"acc_norm_stderr\": 0.026679252270103128\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.018926082916083383,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.018926082916083383\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142783,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142783\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.028762349126466125,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.028762349126466125\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5843373493975904,\n \"acc_stderr\": 0.03836722176598053,\n \"acc_norm\": 0.5843373493975904,\n \"acc_norm_stderr\": 0.03836722176598053\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03188578017686398,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03188578017686398\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5740514075887393,\n \"mc1_stderr\": 0.01731047190407654,\n \"mc2\": 0.7195314778980147,\n \"mc2_stderr\": 0.015001196424578202\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8326756116811366,\n \"acc_stderr\": 0.010490608806828075\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6504927975739196,\n \"acc_stderr\": 0.013133836511705991\n }\n}\n```", "repo_url": "https://huggingface.co/TomGrc/FusionNet", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-12-49.231518.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-12-49.231518.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-12-49.231518.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-12-49.231518.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-12-49.231518.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-12-49.231518.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["**/details_harness|winogrande|5_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-12-49.231518.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T12_12_49.231518", "path": ["results_2024-01-04T12-12-49.231518.parquet"]}, {"split": "latest", "path": 
["results_2024-01-04T12-12-49.231518.parquet"]}]}]}
2024-01-04T12:15:29+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TomGrc/FusionNet Dataset automatically created during the evaluation run of model TomGrc/FusionNet on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:12:49.231518 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
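The card text above mentions loading per-run details with the `datasets` library, but this flattened `text_str` field drops the accompanying snippet. A minimal sketch follows; the repository id is an assumption based on the leaderboard's usual `details_<org>__<model>` naming, while the `harness_winogrande_5` config and the `latest` split names are taken from the config metadata recorded for this run.

```python
# Hypothetical sketch: the repo id is assumed from the leaderboard's
# "open-llm-leaderboard/details_<org>__<model>" naming pattern.
from datasets import load_dataset

# "harness_winogrande_5" and the "latest" split both appear in this record's
# config metadata; "latest" mirrors the most recent timestamped run's parquet.
data = load_dataset(
    "open-llm-leaderboard/details_TomGrc__FusionNet",
    "harness_winogrande_5",
    split="latest",
)
print(data)
```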
[ "# Dataset Card for Evaluation run of TomGrc/FusionNet\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FusionNet on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:12:49.231518(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TomGrc/FusionNet\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FusionNet on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:12:49.231518(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 175, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TomGrc/FusionNet\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FusionNet on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:12:49.231518(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
75816107dd3ea3a63619cd98070b29169cfb1ef2
# Dataset Card for Evaluation run of occultml/Helios-10.7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [occultml/Helios-10.7B](https://huggingface.co/occultml/Helios-10.7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_occultml__Helios-10.7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:17:31.612101](https://huggingface.co/datasets/open-llm-leaderboard/details_occultml__Helios-10.7B/blob/main/results_2024-01-04T12-17-31.612101.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.40988707960386334, "acc_stderr": 0.03401984092738561, "acc_norm": 0.414422676033673, "acc_norm_stderr": 0.03496456895834615, "mc1": 0.30599755201958384, "mc1_stderr": 0.016132229728155045, "mc2": 0.5552021116757884, "mc2_stderr": 0.01659507343053494 }, "harness|arc:challenge|25": { "acc": 0.35665529010238906, "acc_stderr": 0.013998056902620203, "acc_norm": 0.3890784982935154, "acc_norm_stderr": 0.014247309976045609 }, "harness|hellaswag|10": { "acc": 0.3434574785899223, "acc_stderr": 0.004738920624724474, "acc_norm": 0.4660426209918343, "acc_norm_stderr": 0.004978260641742204 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750575, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750575 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.46710526315789475, "acc_stderr": 0.040601270352363966, "acc_norm": 0.46710526315789475, "acc_norm_stderr": 0.040601270352363966 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.41, "acc_stderr": 0.04943110704237102, "acc_norm": 0.41, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.43018867924528303, "acc_stderr": 0.030471445867183238, "acc_norm": 0.43018867924528303, "acc_norm_stderr": 0.030471445867183238 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3888888888888889, "acc_stderr": 0.04076663253918567, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.04076663253918567 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.23, "acc_stderr": 0.04229525846816508, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816508 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3872832369942196, "acc_stderr": 0.037143259063020656, "acc_norm": 0.3872832369942196, "acc_norm_stderr": 0.037143259063020656 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.27450980392156865, "acc_stderr": 0.044405219061793275, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.044405219061793275 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.34893617021276596, "acc_stderr": 0.031158522131357797, "acc_norm": 0.34893617021276596, "acc_norm_stderr": 0.031158522131357797 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3508771929824561, "acc_stderr": 0.044895393502706986, "acc_norm": 0.3508771929824561, "acc_norm_stderr": 0.044895393502706986 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.30344827586206896, "acc_stderr": 0.038312260488503336, "acc_norm": 0.30344827586206896, "acc_norm_stderr": 0.038312260488503336 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24338624338624337, "acc_stderr": 0.022101128787415415, "acc_norm": 0.24338624338624337, "acc_norm_stderr": 0.022101128787415415 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.23809523809523808, "acc_stderr": 0.03809523809523812, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.03809523809523812 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.4935483870967742, "acc_stderr": 0.02844163823354051, "acc_norm": 0.4935483870967742, "acc_norm_stderr": 0.02844163823354051 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3793103448275862, "acc_stderr": 0.034139638059062345, "acc_norm": 0.3793103448275862, "acc_norm_stderr": 0.034139638059062345 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.4666666666666667, "acc_stderr": 0.03895658065271847, "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.03895658065271847 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.47474747474747475, "acc_stderr": 0.03557806245087314, "acc_norm": 0.47474747474747475, "acc_norm_stderr": 0.03557806245087314 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.5077720207253886, "acc_stderr": 0.036080032255696545, "acc_norm": 0.5077720207253886, "acc_norm_stderr": 0.036080032255696545 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.37435897435897436, "acc_stderr": 0.024537591572830506, "acc_norm": 0.37435897435897436, "acc_norm_stderr": 0.024537591572830506 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.29259259259259257, "acc_stderr": 0.027738969632176088, "acc_norm": 0.29259259259259257, "acc_norm_stderr": 0.027738969632176088 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.40756302521008403, "acc_stderr": 0.03191863374478465, "acc_norm": 0.40756302521008403, "acc_norm_stderr": 0.03191863374478465 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2582781456953642, "acc_stderr": 
0.035737053147634576, "acc_norm": 0.2582781456953642, "acc_norm_stderr": 0.035737053147634576 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.4954128440366973, "acc_stderr": 0.021436420955529424, "acc_norm": 0.4954128440366973, "acc_norm_stderr": 0.021436420955529424 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.24537037037037038, "acc_stderr": 0.029346665094372937, "acc_norm": 0.24537037037037038, "acc_norm_stderr": 0.029346665094372937 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.4019607843137255, "acc_stderr": 0.034411900234824655, "acc_norm": 0.4019607843137255, "acc_norm_stderr": 0.034411900234824655 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.48523206751054854, "acc_stderr": 0.032533028078777386, "acc_norm": 0.48523206751054854, "acc_norm_stderr": 0.032533028078777386 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.4663677130044843, "acc_stderr": 0.033481800170603065, "acc_norm": 0.4663677130044843, "acc_norm_stderr": 0.033481800170603065 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.4580152671755725, "acc_stderr": 0.04369802690578756, "acc_norm": 0.4580152671755725, "acc_norm_stderr": 0.04369802690578756 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6033057851239669, "acc_stderr": 0.044658697805310094, "acc_norm": 0.6033057851239669, "acc_norm_stderr": 0.044658697805310094 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5370370370370371, "acc_stderr": 0.04820403072760627, "acc_norm": 0.5370370370370371, "acc_norm_stderr": 0.04820403072760627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.49693251533742333, "acc_stderr": 0.03928297078179663, "acc_norm": 0.49693251533742333, "acc_norm_stderr": 0.03928297078179663 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.30357142857142855, "acc_stderr": 0.04364226155841044, "acc_norm": 0.30357142857142855, "acc_norm_stderr": 0.04364226155841044 }, "harness|hendrycksTest-management|5": { "acc": 0.5048543689320388, "acc_stderr": 0.04950504382128919, "acc_norm": 0.5048543689320388, "acc_norm_stderr": 0.04950504382128919 }, "harness|hendrycksTest-marketing|5": { "acc": 0.5384615384615384, "acc_stderr": 0.03265903381186193, "acc_norm": 0.5384615384615384, "acc_norm_stderr": 0.03265903381186193 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.5504469987228607, "acc_stderr": 0.017788725283507337, "acc_norm": 0.5504469987228607, "acc_norm_stderr": 0.017788725283507337 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5289017341040463, "acc_stderr": 0.026874085883518348, "acc_norm": 0.5289017341040463, "acc_norm_stderr": 0.026874085883518348 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2, "acc_stderr": 0.013378001241813068, "acc_norm": 0.2, "acc_norm_stderr": 0.013378001241813068 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.49673202614379086, "acc_stderr": 0.02862930519400354, "acc_norm": 0.49673202614379086, "acc_norm_stderr": 0.02862930519400354 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5819935691318328, "acc_stderr": 0.028013651891995072, "acc_norm": 0.5819935691318328, "acc_norm_stderr": 0.028013651891995072 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.49382716049382713, "acc_stderr": 0.027818623962583302, "acc_norm": 0.49382716049382713, "acc_norm_stderr": 0.027818623962583302 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.2907801418439716, "acc_stderr": 0.027090664368353178, "acc_norm": 0.2907801418439716, "acc_norm_stderr": 0.027090664368353178 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.29335071707953064, "acc_stderr": 0.011628520449582071, "acc_norm": 0.29335071707953064, "acc_norm_stderr": 0.011628520449582071 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.26838235294117646, "acc_stderr": 0.026917481224377232, "acc_norm": 0.26838235294117646, "acc_norm_stderr": 0.026917481224377232 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.46895424836601307, "acc_stderr": 0.020188804456361883, "acc_norm": 0.46895424836601307, "acc_norm_stderr": 0.020188804456361883 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.4818181818181818, "acc_stderr": 0.04785964010794917, "acc_norm": 0.4818181818181818, "acc_norm_stderr": 0.04785964010794917 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.37142857142857144, "acc_stderr": 0.030932858792789848, "acc_norm": 0.37142857142857144, "acc_norm_stderr": 0.030932858792789848 }, "harness|hendrycksTest-sociology|5": { "acc": 0.5472636815920398, "acc_stderr": 0.03519702717576915, "acc_norm": 0.5472636815920398, "acc_norm_stderr": 0.03519702717576915 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.67, "acc_stderr": 0.047258156262526094, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526094 }, "harness|hendrycksTest-virology|5": { "acc": 0.3614457831325301, "acc_stderr": 0.037400593820293204, "acc_norm": 0.3614457831325301, "acc_norm_stderr": 0.037400593820293204 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6023391812865497, "acc_stderr": 0.03753638955761691, "acc_norm": 0.6023391812865497, "acc_norm_stderr": 0.03753638955761691 }, "harness|truthfulqa:mc|0": { "mc1": 0.30599755201958384, "mc1_stderr": 0.016132229728155045, "mc2": 0.5552021116757884, "mc2_stderr": 0.01659507343053494 }, "harness|winogrande|5": { "acc": 0.7071823204419889, "acc_stderr": 0.012789321118542616 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
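The per-task scores in the card above are plain JSON, so they can be re-aggregated offline without the evaluation harness. A minimal sketch, assuming the full results block from this card has been copied into a local file named `results.json` (the file name is illustrative, not part of the card):

```python
import json

# Load the results JSON copied from the card above (the file name is an assumption).
with open("results.json") as f:
    results = json.load(f)

# Average accuracy over the MMLU (hendrycksTest) subtasks reported for this run.
mmlu_accs = [v["acc"] for k, v in results.items() if k.startswith("harness|hendrycksTest-")]
print(f"{len(mmlu_accs)} MMLU subtasks, mean acc = {sum(mmlu_accs) / len(mmlu_accs):.4f}")
```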
open-llm-leaderboard/details_occultml__Helios-10.7B
[ "region:us" ]
2024-01-04T12:19:47+00:00
{"pretty_name": "Evaluation run of occultml/Helios-10.7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [occultml/Helios-10.7B](https://huggingface.co/occultml/Helios-10.7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_occultml__Helios-10.7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:17:31.612101](https://huggingface.co/datasets/open-llm-leaderboard/details_occultml__Helios-10.7B/blob/main/results_2024-01-04T12-17-31.612101.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.40988707960386334,\n \"acc_stderr\": 0.03401984092738561,\n \"acc_norm\": 0.414422676033673,\n \"acc_norm_stderr\": 0.03496456895834615,\n \"mc1\": 0.30599755201958384,\n \"mc1_stderr\": 0.016132229728155045,\n \"mc2\": 0.5552021116757884,\n \"mc2_stderr\": 0.01659507343053494\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.35665529010238906,\n \"acc_stderr\": 0.013998056902620203,\n \"acc_norm\": 0.3890784982935154,\n \"acc_norm_stderr\": 0.014247309976045609\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.3434574785899223,\n \"acc_stderr\": 0.004738920624724474,\n \"acc_norm\": 0.4660426209918343,\n \"acc_norm_stderr\": 0.004978260641742204\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4740740740740741,\n \"acc_stderr\": 0.04313531696750575,\n \"acc_norm\": 0.4740740740740741,\n \"acc_norm_stderr\": 0.04313531696750575\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.46710526315789475,\n \"acc_stderr\": 0.040601270352363966,\n \"acc_norm\": 0.46710526315789475,\n \"acc_norm_stderr\": 0.040601270352363966\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.43018867924528303,\n \"acc_stderr\": 0.030471445867183238,\n \"acc_norm\": 0.43018867924528303,\n \"acc_norm_stderr\": 0.030471445867183238\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.04076663253918567,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.04076663253918567\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n 
\"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816508,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816508\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3872832369942196,\n \"acc_stderr\": 0.037143259063020656,\n \"acc_norm\": 0.3872832369942196,\n \"acc_norm_stderr\": 0.037143259063020656\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.044405219061793275,\n \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.044405219061793275\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.34893617021276596,\n \"acc_stderr\": 0.031158522131357797,\n \"acc_norm\": 0.34893617021276596,\n \"acc_norm_stderr\": 0.031158522131357797\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3508771929824561,\n \"acc_stderr\": 0.044895393502706986,\n \"acc_norm\": 0.3508771929824561,\n \"acc_norm_stderr\": 0.044895393502706986\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.30344827586206896,\n \"acc_stderr\": 0.038312260488503336,\n \"acc_norm\": 0.30344827586206896,\n \"acc_norm_stderr\": 0.038312260488503336\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.24338624338624337,\n \"acc_stderr\": 0.022101128787415415,\n \"acc_norm\": 0.24338624338624337,\n \"acc_norm_stderr\": 0.022101128787415415\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.23809523809523808,\n \"acc_stderr\": 0.03809523809523812,\n \"acc_norm\": 0.23809523809523808,\n \"acc_norm_stderr\": 0.03809523809523812\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.4935483870967742,\n \"acc_stderr\": 0.02844163823354051,\n \"acc_norm\": 0.4935483870967742,\n \"acc_norm_stderr\": 0.02844163823354051\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3793103448275862,\n \"acc_stderr\": 0.034139638059062345,\n \"acc_norm\": 0.3793103448275862,\n \"acc_norm_stderr\": 0.034139638059062345\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.4666666666666667,\n \"acc_stderr\": 0.03895658065271847,\n \"acc_norm\": 0.4666666666666667,\n \"acc_norm_stderr\": 0.03895658065271847\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.47474747474747475,\n \"acc_stderr\": 0.03557806245087314,\n \"acc_norm\": 0.47474747474747475,\n \"acc_norm_stderr\": 0.03557806245087314\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.5077720207253886,\n \"acc_stderr\": 0.036080032255696545,\n \"acc_norm\": 0.5077720207253886,\n \"acc_norm_stderr\": 0.036080032255696545\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.37435897435897436,\n \"acc_stderr\": 0.024537591572830506,\n \"acc_norm\": 0.37435897435897436,\n \"acc_norm_stderr\": 0.024537591572830506\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.29259259259259257,\n \"acc_stderr\": 0.027738969632176088,\n \"acc_norm\": 0.29259259259259257,\n \"acc_norm_stderr\": 0.027738969632176088\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.40756302521008403,\n \"acc_stderr\": 0.03191863374478465,\n \"acc_norm\": 0.40756302521008403,\n \"acc_norm_stderr\": 0.03191863374478465\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2582781456953642,\n \"acc_stderr\": 0.035737053147634576,\n \"acc_norm\": 0.2582781456953642,\n \"acc_norm_stderr\": 0.035737053147634576\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.4954128440366973,\n \"acc_stderr\": 0.021436420955529424,\n \"acc_norm\": 0.4954128440366973,\n \"acc_norm_stderr\": 0.021436420955529424\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.24537037037037038,\n \"acc_stderr\": 0.029346665094372937,\n \"acc_norm\": 0.24537037037037038,\n \"acc_norm_stderr\": 0.029346665094372937\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.4019607843137255,\n \"acc_stderr\": 0.034411900234824655,\n \"acc_norm\": 0.4019607843137255,\n \"acc_norm_stderr\": 0.034411900234824655\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.48523206751054854,\n \"acc_stderr\": 0.032533028078777386,\n \"acc_norm\": 0.48523206751054854,\n \"acc_norm_stderr\": 0.032533028078777386\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.4663677130044843,\n \"acc_stderr\": 0.033481800170603065,\n \"acc_norm\": 0.4663677130044843,\n \"acc_norm_stderr\": 0.033481800170603065\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.4580152671755725,\n \"acc_stderr\": 0.04369802690578756,\n \"acc_norm\": 0.4580152671755725,\n \"acc_norm_stderr\": 0.04369802690578756\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6033057851239669,\n \"acc_stderr\": 0.044658697805310094,\n \"acc_norm\": 0.6033057851239669,\n \"acc_norm_stderr\": 0.044658697805310094\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5370370370370371,\n \"acc_stderr\": 0.04820403072760627,\n \"acc_norm\": 0.5370370370370371,\n \"acc_norm_stderr\": 0.04820403072760627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.49693251533742333,\n \"acc_stderr\": 0.03928297078179663,\n \"acc_norm\": 0.49693251533742333,\n \"acc_norm_stderr\": 0.03928297078179663\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.30357142857142855,\n \"acc_stderr\": 0.04364226155841044,\n \"acc_norm\": 0.30357142857142855,\n \"acc_norm_stderr\": 0.04364226155841044\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.5048543689320388,\n \"acc_stderr\": 0.04950504382128919,\n \"acc_norm\": 0.5048543689320388,\n \"acc_norm_stderr\": 0.04950504382128919\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.5384615384615384,\n \"acc_stderr\": 0.03265903381186193,\n \"acc_norm\": 0.5384615384615384,\n \"acc_norm_stderr\": 0.03265903381186193\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.5504469987228607,\n \"acc_stderr\": 0.017788725283507337,\n \"acc_norm\": 
0.5504469987228607,\n \"acc_norm_stderr\": 0.017788725283507337\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5289017341040463,\n \"acc_stderr\": 0.026874085883518348,\n \"acc_norm\": 0.5289017341040463,\n \"acc_norm_stderr\": 0.026874085883518348\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.013378001241813068,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.013378001241813068\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.49673202614379086,\n \"acc_stderr\": 0.02862930519400354,\n \"acc_norm\": 0.49673202614379086,\n \"acc_norm_stderr\": 0.02862930519400354\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5819935691318328,\n \"acc_stderr\": 0.028013651891995072,\n \"acc_norm\": 0.5819935691318328,\n \"acc_norm_stderr\": 0.028013651891995072\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.49382716049382713,\n \"acc_stderr\": 0.027818623962583302,\n \"acc_norm\": 0.49382716049382713,\n \"acc_norm_stderr\": 0.027818623962583302\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2907801418439716,\n \"acc_stderr\": 0.027090664368353178,\n \"acc_norm\": 0.2907801418439716,\n \"acc_norm_stderr\": 0.027090664368353178\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.29335071707953064,\n \"acc_stderr\": 0.011628520449582071,\n \"acc_norm\": 0.29335071707953064,\n \"acc_norm_stderr\": 0.011628520449582071\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.26838235294117646,\n \"acc_stderr\": 0.026917481224377232,\n \"acc_norm\": 0.26838235294117646,\n \"acc_norm_stderr\": 0.026917481224377232\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.46895424836601307,\n \"acc_stderr\": 0.020188804456361883,\n \"acc_norm\": 0.46895424836601307,\n \"acc_norm_stderr\": 0.020188804456361883\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.4818181818181818,\n \"acc_stderr\": 0.04785964010794917,\n \"acc_norm\": 0.4818181818181818,\n \"acc_norm_stderr\": 0.04785964010794917\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.37142857142857144,\n \"acc_stderr\": 0.030932858792789848,\n \"acc_norm\": 0.37142857142857144,\n \"acc_norm_stderr\": 0.030932858792789848\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5472636815920398,\n \"acc_stderr\": 0.03519702717576915,\n \"acc_norm\": 0.5472636815920398,\n \"acc_norm_stderr\": 0.03519702717576915\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.047258156262526094,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.047258156262526094\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3614457831325301,\n \"acc_stderr\": 0.037400593820293204,\n \"acc_norm\": 0.3614457831325301,\n \"acc_norm_stderr\": 0.037400593820293204\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.6023391812865497,\n \"acc_stderr\": 0.03753638955761691,\n \"acc_norm\": 0.6023391812865497,\n \"acc_norm_stderr\": 0.03753638955761691\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.30599755201958384,\n \"mc1_stderr\": 0.016132229728155045,\n \"mc2\": 0.5552021116757884,\n \"mc2_stderr\": 0.01659507343053494\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7071823204419889,\n \"acc_stderr\": 0.012789321118542616\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/occultml/Helios-10.7B", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-17-31.612101.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-17-31.612101.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-17-31.612101.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-17-31.612101.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-17-31.612101.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-17-31.612101.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["**/details_harness|winogrande|5_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-17-31.612101.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T12_17_31.612101", "path": ["results_2024-01-04T12-17-31.612101.parquet"]}, {"split": "latest", "path": 
["results_2024-01-04T12-17-31.612101.parquet"]}]}]}
2024-01-04T12:20:10+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of occultml/Helios-10.7B Dataset automatically created during the evaluation run of model occultml/Helios-10.7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:17:31.612101 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of occultml/Helios-10.7B\n\n\n\nDataset automatically created during the evaluation run of model occultml/Helios-10.7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:17:31.612101(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of occultml/Helios-10.7B\n\n\n\nDataset automatically created during the evaluation run of model occultml/Helios-10.7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:17:31.612101(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 181, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of occultml/Helios-10.7B\n\n\n\nDataset automatically created during the evaluation run of model occultml/Helios-10.7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:17:31.612101(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
7d99060ecd512e0afe8e65fa93baf43b5d0e0870
# Dataset Card for Evaluation run of SanjiWatsuki/Sonya-7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [SanjiWatsuki/Sonya-7B](https://huggingface.co/SanjiWatsuki/Sonya-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_SanjiWatsuki__Sonya-7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:19:11.557285](https://huggingface.co/datasets/open-llm-leaderboard/details_SanjiWatsuki__Sonya-7B/blob/main/results_2024-01-04T12-19-11.557285.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6295603583504984, "acc_stderr": 0.03260130617219878, "acc_norm": 0.631279318940089, "acc_norm_stderr": 0.033255643815587696, "mc1": 0.42105263157894735, "mc1_stderr": 0.017283936248136497, "mc2": 0.6122404849853685, "mc2_stderr": 0.015536091308816523 }, "harness|arc:challenge|25": { "acc": 0.6177474402730375, "acc_stderr": 0.014200454049979274, "acc_norm": 0.6459044368600683, "acc_norm_stderr": 0.013975454122756557 }, "harness|hellaswag|10": { "acc": 0.6644094801832304, "acc_stderr": 0.004712314511950968, "acc_norm": 0.8511252738498307, "acc_norm_stderr": 0.003552374531305201 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6, "acc_stderr": 0.042320736951515885, "acc_norm": 0.6, "acc_norm_stderr": 0.042320736951515885 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6578947368421053, "acc_stderr": 0.03860731599316092, "acc_norm": 0.6578947368421053, "acc_norm_stderr": 0.03860731599316092 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6981132075471698, "acc_stderr": 0.02825420034443866, "acc_norm": 0.6981132075471698, "acc_norm_stderr": 0.02825420034443866 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7013888888888888, "acc_stderr": 0.03827052357950756, "acc_norm": 0.7013888888888888, "acc_norm_stderr": 0.03827052357950756 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { 
"acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6358381502890174, "acc_stderr": 0.03669072477416907, "acc_norm": 0.6358381502890174, "acc_norm_stderr": 0.03669072477416907 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287533, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287533 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768077, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768077 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5829787234042553, "acc_stderr": 0.03223276266711712, "acc_norm": 0.5829787234042553, "acc_norm_stderr": 0.03223276266711712 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.047028804320496165, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.047028804320496165 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5172413793103449, "acc_stderr": 0.04164188720169375, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.04164188720169375 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3915343915343915, "acc_stderr": 0.025138091388851105, "acc_norm": 0.3915343915343915, "acc_norm_stderr": 0.025138091388851105 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7677419354838709, "acc_stderr": 0.024022256130308235, "acc_norm": 0.7677419354838709, "acc_norm_stderr": 0.024022256130308235 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.03287666758603489, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.03287666758603489 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586808, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586808 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8808290155440415, "acc_stderr": 0.023381935348121427, "acc_norm": 0.8808290155440415, "acc_norm_stderr": 0.023381935348121427 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6410256410256411, "acc_stderr": 0.024321738484602354, "acc_norm": 0.6410256410256411, "acc_norm_stderr": 0.024321738484602354 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.337037037037037, "acc_stderr": 0.02882088466625326, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.02882088466625326 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6512605042016807, "acc_stderr": 0.030956636328566548, "acc_norm": 0.6512605042016807, "acc_norm_stderr": 0.030956636328566548 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, 
"acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8348623853211009, "acc_stderr": 0.015919557829976037, "acc_norm": 0.8348623853211009, "acc_norm_stderr": 0.015919557829976037 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49537037037037035, "acc_stderr": 0.03409825519163572, "acc_norm": 0.49537037037037035, "acc_norm_stderr": 0.03409825519163572 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8137254901960784, "acc_stderr": 0.02732547096671632, "acc_norm": 0.8137254901960784, "acc_norm_stderr": 0.02732547096671632 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8185654008438819, "acc_stderr": 0.025085961144579654, "acc_norm": 0.8185654008438819, "acc_norm_stderr": 0.025085961144579654 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.03102441174057222, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.03102441174057222 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7175572519083969, "acc_stderr": 0.03948406125768361, "acc_norm": 0.7175572519083969, "acc_norm_stderr": 0.03948406125768361 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7603305785123967, "acc_stderr": 0.03896878985070417, "acc_norm": 0.7603305785123967, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.75, "acc_stderr": 0.04186091791394607, "acc_norm": 0.75, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7668711656441718, "acc_stderr": 0.0332201579577674, "acc_norm": 0.7668711656441718, "acc_norm_stderr": 0.0332201579577674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.023086635086841403, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.023086635086841403 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8135376756066411, "acc_stderr": 0.013927751372001505, "acc_norm": 0.8135376756066411, "acc_norm_stderr": 0.013927751372001505 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7052023121387283, "acc_stderr": 0.024547617794803828, "acc_norm": 0.7052023121387283, "acc_norm_stderr": 0.024547617794803828 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.37206703910614525, "acc_stderr": 0.016165847583563295, "acc_norm": 0.37206703910614525, "acc_norm_stderr": 0.016165847583563295 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6764705882352942, "acc_stderr": 0.026787453111906508, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.026787453111906508 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7009646302250804, "acc_stderr": 0.026003301117885135, "acc_norm": 0.7009646302250804, "acc_norm_stderr": 0.026003301117885135 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7098765432098766, "acc_stderr": 0.025251173936495036, "acc_norm": 0.7098765432098766, "acc_norm_stderr": 0.025251173936495036 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4219858156028369, "acc_stderr": 
0.02946218923337059, "acc_norm": 0.4219858156028369, "acc_norm_stderr": 0.02946218923337059 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47392438070404175, "acc_stderr": 0.012752858346533126, "acc_norm": 0.47392438070404175, "acc_norm_stderr": 0.012752858346533126 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6580882352941176, "acc_stderr": 0.028814722422254187, "acc_norm": 0.6580882352941176, "acc_norm_stderr": 0.028814722422254187 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6274509803921569, "acc_stderr": 0.01955964680921593, "acc_norm": 0.6274509803921569, "acc_norm_stderr": 0.01955964680921593 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7142857142857143, "acc_stderr": 0.028920583220675602, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.028920583220675602 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5180722891566265, "acc_stderr": 0.03889951252827216, "acc_norm": 0.5180722891566265, "acc_norm_stderr": 0.03889951252827216 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7953216374269005, "acc_stderr": 0.03094445977853321, "acc_norm": 0.7953216374269005, "acc_norm_stderr": 0.03094445977853321 }, "harness|truthfulqa:mc|0": { "mc1": 0.42105263157894735, "mc1_stderr": 0.017283936248136497, "mc2": 0.6122404849853685, "mc2_stderr": 0.015536091308816523 }, "harness|winogrande|5": { "acc": 0.7774269928966061, "acc_stderr": 0.011690933809712666 }, "harness|gsm8k|5": { "acc": 0.5951478392721758, "acc_stderr": 0.013520817666870506 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_SanjiWatsuki__Sonya-7B
[ "region:us" ]
2024-01-04T12:21:27+00:00
{"pretty_name": "Evaluation run of SanjiWatsuki/Sonya-7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [SanjiWatsuki/Sonya-7B](https://huggingface.co/SanjiWatsuki/Sonya-7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_SanjiWatsuki__Sonya-7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:19:11.557285](https://huggingface.co/datasets/open-llm-leaderboard/details_SanjiWatsuki__Sonya-7B/blob/main/results_2024-01-04T12-19-11.557285.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6295603583504984,\n \"acc_stderr\": 0.03260130617219878,\n \"acc_norm\": 0.631279318940089,\n \"acc_norm_stderr\": 0.033255643815587696,\n \"mc1\": 0.42105263157894735,\n \"mc1_stderr\": 0.017283936248136497,\n \"mc2\": 0.6122404849853685,\n \"mc2_stderr\": 0.015536091308816523\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6177474402730375,\n \"acc_stderr\": 0.014200454049979274,\n \"acc_norm\": 0.6459044368600683,\n \"acc_norm_stderr\": 0.013975454122756557\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6644094801832304,\n \"acc_stderr\": 0.004712314511950968,\n \"acc_norm\": 0.8511252738498307,\n \"acc_norm_stderr\": 0.003552374531305201\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.042320736951515885,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.042320736951515885\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6578947368421053,\n \"acc_stderr\": 0.03860731599316092,\n \"acc_norm\": 0.6578947368421053,\n \"acc_norm_stderr\": 0.03860731599316092\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6981132075471698,\n \"acc_stderr\": 0.02825420034443866,\n \"acc_norm\": 0.6981132075471698,\n \"acc_norm_stderr\": 0.02825420034443866\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7013888888888888,\n \"acc_stderr\": 0.03827052357950756,\n \"acc_norm\": 0.7013888888888888,\n \"acc_norm_stderr\": 0.03827052357950756\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n 
\"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6358381502890174,\n \"acc_stderr\": 0.03669072477416907,\n \"acc_norm\": 0.6358381502890174,\n \"acc_norm_stderr\": 0.03669072477416907\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287533,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287533\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768077,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768077\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.047028804320496165,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.047028804320496165\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.04164188720169375,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.04164188720169375\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3915343915343915,\n \"acc_stderr\": 0.025138091388851105,\n \"acc_norm\": 0.3915343915343915,\n \"acc_norm_stderr\": 0.025138091388851105\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7677419354838709,\n \"acc_stderr\": 0.024022256130308235,\n \"acc_norm\": 0.7677419354838709,\n \"acc_norm_stderr\": 0.024022256130308235\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.03287666758603489,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.03287666758603489\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586808,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586808\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.023381935348121427,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 0.023381935348121427\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6410256410256411,\n \"acc_stderr\": 0.024321738484602354,\n \"acc_norm\": 0.6410256410256411,\n \"acc_norm_stderr\": 0.024321738484602354\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.337037037037037,\n \"acc_stderr\": 0.02882088466625326,\n \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.02882088466625326\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6512605042016807,\n \"acc_stderr\": 0.030956636328566548,\n \"acc_norm\": 0.6512605042016807,\n \"acc_norm_stderr\": 0.030956636328566548\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8348623853211009,\n \"acc_stderr\": 0.015919557829976037,\n \"acc_norm\": 0.8348623853211009,\n \"acc_norm_stderr\": 0.015919557829976037\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49537037037037035,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.49537037037037035,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8137254901960784,\n \"acc_stderr\": 0.02732547096671632,\n \"acc_norm\": 0.8137254901960784,\n \"acc_norm_stderr\": 0.02732547096671632\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8185654008438819,\n \"acc_stderr\": 0.025085961144579654,\n \"acc_norm\": 0.8185654008438819,\n \"acc_norm_stderr\": 0.025085961144579654\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057222,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057222\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7175572519083969,\n \"acc_stderr\": 0.03948406125768361,\n \"acc_norm\": 0.7175572519083969,\n \"acc_norm_stderr\": 0.03948406125768361\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.023086635086841403,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.023086635086841403\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8135376756066411,\n \"acc_stderr\": 0.013927751372001505,\n \"acc_norm\": 0.8135376756066411,\n 
\"acc_norm_stderr\": 0.013927751372001505\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7052023121387283,\n \"acc_stderr\": 0.024547617794803828,\n \"acc_norm\": 0.7052023121387283,\n \"acc_norm_stderr\": 0.024547617794803828\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.37206703910614525,\n \"acc_stderr\": 0.016165847583563295,\n \"acc_norm\": 0.37206703910614525,\n \"acc_norm_stderr\": 0.016165847583563295\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.026787453111906508,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.026787453111906508\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7009646302250804,\n \"acc_stderr\": 0.026003301117885135,\n \"acc_norm\": 0.7009646302250804,\n \"acc_norm_stderr\": 0.026003301117885135\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7098765432098766,\n \"acc_stderr\": 0.025251173936495036,\n \"acc_norm\": 0.7098765432098766,\n \"acc_norm_stderr\": 0.025251173936495036\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4219858156028369,\n \"acc_stderr\": 0.02946218923337059,\n \"acc_norm\": 0.4219858156028369,\n \"acc_norm_stderr\": 0.02946218923337059\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47392438070404175,\n \"acc_stderr\": 0.012752858346533126,\n \"acc_norm\": 0.47392438070404175,\n \"acc_norm_stderr\": 0.012752858346533126\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6580882352941176,\n \"acc_stderr\": 0.028814722422254187,\n \"acc_norm\": 0.6580882352941176,\n \"acc_norm_stderr\": 0.028814722422254187\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6274509803921569,\n \"acc_stderr\": 0.01955964680921593,\n \"acc_norm\": 0.6274509803921569,\n \"acc_norm_stderr\": 0.01955964680921593\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.028920583220675602,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.028920583220675602\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5180722891566265,\n \"acc_stderr\": 0.03889951252827216,\n \"acc_norm\": 0.5180722891566265,\n \"acc_norm_stderr\": 0.03889951252827216\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7953216374269005,\n \"acc_stderr\": 0.03094445977853321,\n \"acc_norm\": 0.7953216374269005,\n \"acc_norm_stderr\": 0.03094445977853321\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.42105263157894735,\n \"mc1_stderr\": 0.017283936248136497,\n \"mc2\": 0.6122404849853685,\n \"mc2_stderr\": 0.015536091308816523\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7774269928966061,\n \"acc_stderr\": 0.011690933809712666\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5951478392721758,\n \"acc_stderr\": 0.013520817666870506\n }\n}\n```", "repo_url": "https://huggingface.co/SanjiWatsuki/Sonya-7B", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-19-11.557285.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-19-11.557285.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-19-11.557285.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-19-11.557285.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-19-11.557285.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-19-11.557285.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["**/details_harness|winogrande|5_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-19-11.557285.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T12_19_11.557285", "path": ["results_2024-01-04T12-19-11.557285.parquet"]}, {"split": "latest", "path": 
["results_2024-01-04T12-19-11.557285.parquet"]}]}]}
2024-01-04T12:21:50+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of SanjiWatsuki/Sonya-7B Dataset automatically created during the evaluation run of model SanjiWatsuki/Sonya-7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:19:11.557285 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
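For example, a minimal loading sketch (the repository id is assumed from the leaderboard's usual `details_<org>__<model>` naming for this model, and `harness_winogrande_5` is one of the run's configurations listed above; adjust the configuration name to the task you want):

```python
from datasets import load_dataset

# Repository id inferred from the "details_<org>__<model>" naming convention
# used by the Open LLM Leaderboard for SanjiWatsuki/Sonya-7B (assumption,
# verify the exact id on the Hub before relying on it).
data = load_dataset(
    "open-llm-leaderboard/details_SanjiWatsuki__Sonya-7B",
    "harness_winogrande_5",
    split="train",
)
```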
[ "# Dataset Card for Evaluation run of SanjiWatsuki/Sonya-7B\n\n\n\nDataset automatically created during the evaluation run of model SanjiWatsuki/Sonya-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:19:11.557285(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of SanjiWatsuki/Sonya-7B\n\n\n\nDataset automatically created during the evaluation run of model SanjiWatsuki/Sonya-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:19:11.557285(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 181, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of SanjiWatsuki/Sonya-7B\n\n\n\nDataset automatically created during the evaluation run of model SanjiWatsuki/Sonya-7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:19:11.557285(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
3dcecb9674056a0288d7ea11f644a5db0aa4c623
# Dataset Card for Evaluation run of samir-fama/SamirGPT-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [samir-fama/SamirGPT-v1](https://huggingface.co/samir-fama/SamirGPT-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_samir-fama__SamirGPT-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:19:15.749387](https://huggingface.co/datasets/open-llm-leaderboard/details_samir-fama__SamirGPT-v1/blob/main/results_2024-01-04T12-19-15.749387.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6575352236651422, "acc_stderr": 0.031966900177508965, "acc_norm": 0.6573567440981961, "acc_norm_stderr": 0.032629186193667725, "mc1": 0.4724602203182375, "mc1_stderr": 0.017476930190712187, "mc2": 0.6336566833570767, "mc2_stderr": 0.015069694569619901 }, "harness|arc:challenge|25": { "acc": 0.6672354948805461, "acc_stderr": 0.013769863046192309, "acc_norm": 0.6953924914675768, "acc_norm_stderr": 0.013449522109932489 }, "harness|hellaswag|10": { "acc": 0.6901015733917546, "acc_stderr": 0.004615063817741859, "acc_norm": 0.870444134634535, "acc_norm_stderr": 0.00335127840339241 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6592592592592592, "acc_stderr": 0.04094376269996792, "acc_norm": 0.6592592592592592, "acc_norm_stderr": 0.04094376269996792 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7320754716981132, "acc_stderr": 0.027257260322494845, "acc_norm": 0.7320754716981132, "acc_norm_stderr": 0.027257260322494845 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.050161355804659205, "acc_norm": 0.53, "acc_norm_stderr": 0.050161355804659205 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6763005780346821, "acc_stderr": 0.035676037996391706, "acc_norm": 0.6763005780346821, "acc_norm_stderr": 0.035676037996391706 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909282, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909282 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6085106382978723, "acc_stderr": 0.03190701242326812, "acc_norm": 0.6085106382978723, "acc_norm_stderr": 0.03190701242326812 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42328042328042326, "acc_stderr": 0.025446365634406783, "acc_norm": 0.42328042328042326, "acc_norm_stderr": 0.025446365634406783 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4603174603174603, "acc_stderr": 0.04458029125470973, "acc_norm": 0.4603174603174603, "acc_norm_stderr": 0.04458029125470973 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642518, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642518 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4876847290640394, "acc_stderr": 0.035169204442208966, "acc_norm": 0.4876847290640394, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.032568666616811015, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.032568666616811015 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463362, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463362 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033456, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033456 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6846153846153846, "acc_stderr": 0.023559646983189936, "acc_norm": 0.6846153846153846, "acc_norm_stderr": 0.023559646983189936 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3851851851851852, "acc_stderr": 0.029670906124630872, "acc_norm": 0.3851851851851852, "acc_norm_stderr": 0.029670906124630872 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6932773109243697, "acc_stderr": 0.02995382389188704, "acc_norm": 0.6932773109243697, "acc_norm_stderr": 0.02995382389188704 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 
0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8403669724770643, "acc_stderr": 0.015703498348461783, "acc_norm": 0.8403669724770643, "acc_norm_stderr": 0.015703498348461783 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5277777777777778, "acc_stderr": 0.0340470532865388, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.02552472232455334, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.02552472232455334 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.025530100460233494, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.025530100460233494 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742178, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742178 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.7669902912621359, "acc_stderr": 0.04185832598928315, "acc_norm": 0.7669902912621359, "acc_norm_stderr": 0.04185832598928315 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406964, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406964 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.0446196043338474, "acc_norm": 0.73, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8339719029374202, "acc_stderr": 0.0133064782430663, "acc_norm": 0.8339719029374202, "acc_norm_stderr": 0.0133064782430663 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7485549132947977, "acc_stderr": 0.02335736578587403, "acc_norm": 0.7485549132947977, "acc_norm_stderr": 0.02335736578587403 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4324022346368715, "acc_stderr": 0.016568971233548606, "acc_norm": 0.4324022346368715, "acc_norm_stderr": 0.016568971233548606 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7352941176470589, "acc_stderr": 0.02526169121972948, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.02526169121972948 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712995, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712995 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.02982074719142248, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.02982074719142248 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46284224250325945, "acc_stderr": 0.01273492357953207, "acc_norm": 0.46284224250325945, "acc_norm_stderr": 0.01273492357953207 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6801470588235294, "acc_stderr": 0.02833295951403121, "acc_norm": 0.6801470588235294, "acc_norm_stderr": 0.02833295951403121 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6781045751633987, "acc_stderr": 0.018901015322093092, "acc_norm": 0.6781045751633987, "acc_norm_stderr": 0.018901015322093092 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8557213930348259, "acc_stderr": 0.024845753212306053, "acc_norm": 0.8557213930348259, "acc_norm_stderr": 0.024845753212306053 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.034873508801977704, "acc_norm": 0.86, "acc_norm_stderr": 0.034873508801977704 }, "harness|hendrycksTest-virology|5": { "acc": 0.5240963855421686, "acc_stderr": 0.03887971849597264, "acc_norm": 0.5240963855421686, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8421052631578947, "acc_stderr": 0.027966785859160896, "acc_norm": 0.8421052631578947, "acc_norm_stderr": 0.027966785859160896 }, "harness|truthfulqa:mc|0": { "mc1": 0.4724602203182375, "mc1_stderr": 0.017476930190712187, "mc2": 0.6336566833570767, "mc2_stderr": 0.015069694569619901 }, "harness|winogrande|5": { "acc": 0.8168902920284136, "acc_stderr": 0.010869778633168374 }, "harness|gsm8k|5": { "acc": 0.7172100075815011, "acc_stderr": 0.012405020417873619 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
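In the same way, the aggregated "results" configuration described above can be loaded directly; a minimal sketch (the "latest" split name follows the convention used by the per-task configurations of this repository):

```python
from datasets import load_dataset

# Aggregated metrics for the most recent evaluation run of SamirGPT-v1.
results = load_dataset(
    "open-llm-leaderboard/details_samir-fama__SamirGPT-v1",
    "results",
    split="latest",
)
print(results[0])  # one row holding the aggregated results of the run
```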
open-llm-leaderboard/details_samir-fama__SamirGPT-v1
[ "region:us" ]
2024-01-04T12:21:29+00:00
{"pretty_name": "Evaluation run of samir-fama/SamirGPT-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [samir-fama/SamirGPT-v1](https://huggingface.co/samir-fama/SamirGPT-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_samir-fama__SamirGPT-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:19:15.749387](https://huggingface.co/datasets/open-llm-leaderboard/details_samir-fama__SamirGPT-v1/blob/main/results_2024-01-04T12-19-15.749387.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6575352236651422,\n \"acc_stderr\": 0.031966900177508965,\n \"acc_norm\": 0.6573567440981961,\n \"acc_norm_stderr\": 0.032629186193667725,\n \"mc1\": 0.4724602203182375,\n \"mc1_stderr\": 0.017476930190712187,\n \"mc2\": 0.6336566833570767,\n \"mc2_stderr\": 0.015069694569619901\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6672354948805461,\n \"acc_stderr\": 0.013769863046192309,\n \"acc_norm\": 0.6953924914675768,\n \"acc_norm_stderr\": 0.013449522109932489\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6901015733917546,\n \"acc_stderr\": 0.004615063817741859,\n \"acc_norm\": 0.870444134634535,\n \"acc_norm_stderr\": 0.00335127840339241\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6592592592592592,\n \"acc_stderr\": 0.04094376269996792,\n \"acc_norm\": 0.6592592592592592,\n \"acc_norm_stderr\": 0.04094376269996792\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7320754716981132,\n \"acc_stderr\": 0.027257260322494845,\n \"acc_norm\": 0.7320754716981132,\n \"acc_norm_stderr\": 0.027257260322494845\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n 
\"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6763005780346821,\n \"acc_stderr\": 0.035676037996391706,\n \"acc_norm\": 0.6763005780346821,\n \"acc_norm_stderr\": 0.035676037996391706\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909282,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909282\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6085106382978723,\n \"acc_stderr\": 0.03190701242326812,\n \"acc_norm\": 0.6085106382978723,\n \"acc_norm_stderr\": 0.03190701242326812\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42328042328042326,\n \"acc_stderr\": 0.025446365634406783,\n \"acc_norm\": 0.42328042328042326,\n \"acc_norm_stderr\": 0.025446365634406783\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4603174603174603,\n \"acc_stderr\": 0.04458029125470973,\n \"acc_norm\": 0.4603174603174603,\n \"acc_norm_stderr\": 0.04458029125470973\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642518,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642518\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4876847290640394,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.4876847290640394,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.032568666616811015,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.032568666616811015\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033456,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033456\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6846153846153846,\n \"acc_stderr\": 0.023559646983189936,\n \"acc_norm\": 0.6846153846153846,\n \"acc_norm_stderr\": 0.023559646983189936\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3851851851851852,\n \"acc_stderr\": 0.029670906124630872,\n \"acc_norm\": 0.3851851851851852,\n \"acc_norm_stderr\": 0.029670906124630872\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6932773109243697,\n \"acc_stderr\": 0.02995382389188704,\n \"acc_norm\": 0.6932773109243697,\n \"acc_norm_stderr\": 0.02995382389188704\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8403669724770643,\n \"acc_stderr\": 0.015703498348461783,\n \"acc_norm\": 0.8403669724770643,\n \"acc_norm_stderr\": 0.015703498348461783\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5277777777777778,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.5277777777777778,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.02552472232455334,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.02552472232455334\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.025530100460233494,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.025530100460233494\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7669902912621359,\n \"acc_stderr\": 0.04185832598928315,\n \"acc_norm\": 0.7669902912621359,\n \"acc_norm_stderr\": 0.04185832598928315\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406964,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406964\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8339719029374202,\n \"acc_stderr\": 0.0133064782430663,\n \"acc_norm\": 
0.8339719029374202,\n \"acc_norm_stderr\": 0.0133064782430663\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7485549132947977,\n \"acc_stderr\": 0.02335736578587403,\n \"acc_norm\": 0.7485549132947977,\n \"acc_norm_stderr\": 0.02335736578587403\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4324022346368715,\n \"acc_stderr\": 0.016568971233548606,\n \"acc_norm\": 0.4324022346368715,\n \"acc_norm_stderr\": 0.016568971233548606\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.02526169121972948,\n \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.02526169121972948\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712995,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712995\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.02982074719142248,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.02982074719142248\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46284224250325945,\n \"acc_stderr\": 0.01273492357953207,\n \"acc_norm\": 0.46284224250325945,\n \"acc_norm_stderr\": 0.01273492357953207\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6801470588235294,\n \"acc_stderr\": 0.02833295951403121,\n \"acc_norm\": 0.6801470588235294,\n \"acc_norm_stderr\": 0.02833295951403121\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6781045751633987,\n \"acc_stderr\": 0.018901015322093092,\n \"acc_norm\": 0.6781045751633987,\n \"acc_norm_stderr\": 0.018901015322093092\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8557213930348259,\n \"acc_stderr\": 0.024845753212306053,\n \"acc_norm\": 0.8557213930348259,\n \"acc_norm_stderr\": 0.024845753212306053\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.034873508801977704,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.034873508801977704\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5240963855421686,\n \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.5240963855421686,\n \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.027966785859160896,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.027966785859160896\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4724602203182375,\n \"mc1_stderr\": 0.017476930190712187,\n \"mc2\": 0.6336566833570767,\n \"mc2_stderr\": 0.015069694569619901\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8168902920284136,\n \"acc_stderr\": 0.010869778633168374\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7172100075815011,\n \"acc_stderr\": 0.012405020417873619\n }\n}\n```", "repo_url": 
"https://huggingface.co/samir-fama/SamirGPT-v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-19-15.749387.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-19-15.749387.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-19-15.749387.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-19-15.749387.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-19-15.749387.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_19_15.749387", "path": ["**/details_harness|winogrande|5_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-19-15.749387.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T12_19_15.749387", "path": ["results_2024-01-04T12-19-15.749387.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T12-19-15.749387.parquet"]}]}]}
2024-01-04T12:21:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of samir-fama/SamirGPT-v1 Dataset automatically created during the evaluation run of model samir-fama/SamirGPT-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:19:15.749387 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of samir-fama/SamirGPT-v1\n\n\n\nDataset automatically created during the evaluation run of model samir-fama/SamirGPT-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:19:15.749387(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of samir-fama/SamirGPT-v1\n\n\n\nDataset automatically created during the evaluation run of model samir-fama/SamirGPT-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:19:15.749387(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of samir-fama/SamirGPT-v1\n\n\n\nDataset automatically created during the evaluation run of model samir-fama/SamirGPT-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:19:15.749387(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
bda3e69fb1c19bdc71e5ea60b174a85c32ff0fcf
# Dataset Card for Evaluation run of occultml/Helios-10.7B-v2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [occultml/Helios-10.7B-v2](https://huggingface.co/occultml/Helios-10.7B-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_occultml__Helios-10.7B-v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:23:10.136079](https://huggingface.co/datasets/open-llm-leaderboard/details_occultml__Helios-10.7B-v2/blob/main/results_2024-01-04T12-23-10.136079.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.4114274647380307, "acc_stderr": 0.034055832181383035, "acc_norm": 0.41611952976407623, "acc_norm_stderr": 0.03500219697159756, "mc1": 0.3047735618115055, "mc1_stderr": 0.016114124156882455, "mc2": 0.5550965546640495, "mc2_stderr": 0.016601840091756987 }, "harness|arc:challenge|25": { "acc": 0.35494880546075086, "acc_stderr": 0.013983036904094095, "acc_norm": 0.3916382252559727, "acc_norm_stderr": 0.014264122124938213 }, "harness|hellaswag|10": { "acc": 0.34266082453694485, "acc_stderr": 0.004736292355716404, "acc_norm": 0.46634136626170086, "acc_norm_stderr": 0.0049784626909669255 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4740740740740741, "acc_stderr": 0.04313531696750575, "acc_norm": 0.4740740740740741, "acc_norm_stderr": 0.04313531696750575 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4605263157894737, "acc_stderr": 0.04056242252249034, "acc_norm": 0.4605263157894737, "acc_norm_stderr": 0.04056242252249034 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001974, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001974 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.43018867924528303, "acc_stderr": 0.030471445867183238, "acc_norm": 0.43018867924528303, "acc_norm_stderr": 0.030471445867183238 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.3958333333333333, "acc_stderr": 0.04089465449325582, "acc_norm": 0.3958333333333333, "acc_norm_stderr": 0.04089465449325582 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.3988439306358382, "acc_stderr": 0.03733626655383509, "acc_norm": 0.3988439306358382, "acc_norm_stderr": 0.03733626655383509 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.27450980392156865, "acc_stderr": 0.044405219061793275, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.044405219061793275 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.34893617021276596, "acc_stderr": 0.031158522131357797, "acc_norm": 0.34893617021276596, "acc_norm_stderr": 0.031158522131357797 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3508771929824561, "acc_stderr": 0.044895393502706986, "acc_norm": 0.3508771929824561, "acc_norm_stderr": 0.044895393502706986 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.31724137931034485, "acc_stderr": 0.038783523721386215, "acc_norm": 0.31724137931034485, "acc_norm_stderr": 0.038783523721386215 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.022569897074918428, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.022569897074918428 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.24603174603174602, "acc_stderr": 0.03852273364924315, "acc_norm": 0.24603174603174602, "acc_norm_stderr": 0.03852273364924315 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.4870967741935484, "acc_stderr": 0.02843453315268186, "acc_norm": 0.4870967741935484, "acc_norm_stderr": 0.02843453315268186 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.39408866995073893, "acc_stderr": 0.034381579670365446, "acc_norm": 0.39408866995073893, "acc_norm_stderr": 0.034381579670365446 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.4666666666666667, "acc_stderr": 0.03895658065271847, "acc_norm": 0.4666666666666667, "acc_norm_stderr": 0.03895658065271847 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.4696969696969697, "acc_stderr": 0.03555804051763929, "acc_norm": 0.4696969696969697, "acc_norm_stderr": 0.03555804051763929 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.5077720207253886, "acc_stderr": 0.036080032255696545, "acc_norm": 0.5077720207253886, "acc_norm_stderr": 0.036080032255696545 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.3769230769230769, "acc_stderr": 0.024570975364225995, "acc_norm": 0.3769230769230769, "acc_norm_stderr": 0.024570975364225995 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2851851851851852, "acc_stderr": 0.027528599210340492, "acc_norm": 0.2851851851851852, "acc_norm_stderr": 0.027528599210340492 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.42016806722689076, "acc_stderr": 0.03206183783236152, "acc_norm": 0.42016806722689076, "acc_norm_stderr": 0.03206183783236152 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2582781456953642, "acc_stderr": 0.035737053147634576, 
"acc_norm": 0.2582781456953642, "acc_norm_stderr": 0.035737053147634576 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.4917431192660551, "acc_stderr": 0.021434399918214338, "acc_norm": 0.4917431192660551, "acc_norm_stderr": 0.021434399918214338 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.24537037037037038, "acc_stderr": 0.029346665094372937, "acc_norm": 0.24537037037037038, "acc_norm_stderr": 0.029346665094372937 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.39215686274509803, "acc_stderr": 0.03426712349247271, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.03426712349247271 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.48523206751054854, "acc_stderr": 0.032533028078777386, "acc_norm": 0.48523206751054854, "acc_norm_stderr": 0.032533028078777386 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.452914798206278, "acc_stderr": 0.033408675019233246, "acc_norm": 0.452914798206278, "acc_norm_stderr": 0.033408675019233246 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.45038167938931295, "acc_stderr": 0.04363643698524779, "acc_norm": 0.45038167938931295, "acc_norm_stderr": 0.04363643698524779 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6115702479338843, "acc_stderr": 0.04449270350068382, "acc_norm": 0.6115702479338843, "acc_norm_stderr": 0.04449270350068382 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5462962962962963, "acc_stderr": 0.04812917324536823, "acc_norm": 0.5462962962962963, "acc_norm_stderr": 0.04812917324536823 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.49693251533742333, "acc_stderr": 0.03928297078179663, "acc_norm": 0.49693251533742333, "acc_norm_stderr": 0.03928297078179663 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.33035714285714285, "acc_stderr": 0.04464285714285714, "acc_norm": 0.33035714285714285, "acc_norm_stderr": 0.04464285714285714 }, "harness|hendrycksTest-management|5": { "acc": 0.5048543689320388, "acc_stderr": 0.04950504382128919, "acc_norm": 0.5048543689320388, "acc_norm_stderr": 0.04950504382128919 }, "harness|hendrycksTest-marketing|5": { "acc": 0.5299145299145299, "acc_stderr": 0.03269741106812444, "acc_norm": 0.5299145299145299, "acc_norm_stderr": 0.03269741106812444 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.5593869731800766, "acc_stderr": 0.017753396973908493, "acc_norm": 0.5593869731800766, "acc_norm_stderr": 0.017753396973908493 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5289017341040463, "acc_stderr": 0.026874085883518348, "acc_norm": 0.5289017341040463, "acc_norm_stderr": 0.026874085883518348 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.19664804469273742, "acc_stderr": 0.013293183027454641, "acc_norm": 0.19664804469273742, "acc_norm_stderr": 0.013293183027454641 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.4934640522875817, "acc_stderr": 0.028627470550556054, "acc_norm": 0.4934640522875817, "acc_norm_stderr": 0.028627470550556054 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.572347266881029, "acc_stderr": 0.028099240775809553, "acc_norm": 0.572347266881029, "acc_norm_stderr": 0.028099240775809553 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.49074074074074076, "acc_stderr": 0.027815973433878014, "acc_norm": 0.49074074074074076, "acc_norm_stderr": 0.027815973433878014 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.2978723404255319, "acc_stderr": 0.02728160834446942, "acc_norm": 0.2978723404255319, "acc_norm_stderr": 0.02728160834446942 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2900912646675359, "acc_stderr": 0.011590375554733093, "acc_norm": 0.2900912646675359, "acc_norm_stderr": 0.011590375554733093 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.2757352941176471, "acc_stderr": 0.02714627193662517, "acc_norm": 0.2757352941176471, "acc_norm_stderr": 0.02714627193662517 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4624183006535948, "acc_stderr": 0.02017061497496978, "acc_norm": 0.4624183006535948, "acc_norm_stderr": 0.02017061497496978 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5, "acc_stderr": 0.04789131426105757, "acc_norm": 0.5, "acc_norm_stderr": 0.04789131426105757 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.3836734693877551, "acc_stderr": 0.03113088039623593, "acc_norm": 0.3836734693877551, "acc_norm_stderr": 0.03113088039623593 }, "harness|hendrycksTest-sociology|5": { "acc": 0.5522388059701493, "acc_stderr": 0.03516184772952167, "acc_norm": 0.5522388059701493, "acc_norm_stderr": 0.03516184772952167 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.67, "acc_stderr": 0.047258156262526094, "acc_norm": 0.67, "acc_norm_stderr": 0.047258156262526094 }, "harness|hendrycksTest-virology|5": { "acc": 0.3614457831325301, "acc_stderr": 0.037400593820293204, "acc_norm": 0.3614457831325301, "acc_norm_stderr": 0.037400593820293204 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6081871345029239, "acc_stderr": 0.037439798259264, "acc_norm": 0.6081871345029239, "acc_norm_stderr": 0.037439798259264 }, "harness|truthfulqa:mc|0": { "mc1": 0.3047735618115055, "mc1_stderr": 0.016114124156882455, "mc2": 0.5550965546640495, "mc2_stderr": 0.016601840091756987 }, "harness|winogrande|5": { "acc": 0.7063930544593529, "acc_stderr": 0.012799397296204182 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
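A minimal sketch of loading one of the per-task detail configurations (assuming the `datasets` library is installed; `harness_gsm8k_5` and the `latest` split are names declared in this card's metadata):

```python
from datasets import load_dataset

# Load the per-sample details for the GSM8K run of this model; the "latest"
# split points to the most recent evaluation recorded in this repository.
details = load_dataset(
    "open-llm-leaderboard/details_occultml__Helios-10.7B-v2",
    "harness_gsm8k_5",
    split="latest",
)

# Convert to a DataFrame for a quick look at the stored columns and rows.
df = details.to_pandas()
print(df.columns.tolist())
print(df.head())
```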
open-llm-leaderboard/details_occultml__Helios-10.7B-v2
[ "region:us" ]
2024-01-04T12:25:33+00:00
{"pretty_name": "Evaluation run of occultml/Helios-10.7B-v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [occultml/Helios-10.7B-v2](https://huggingface.co/occultml/Helios-10.7B-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_occultml__Helios-10.7B-v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:23:10.136079](https://huggingface.co/datasets/open-llm-leaderboard/details_occultml__Helios-10.7B-v2/blob/main/results_2024-01-04T12-23-10.136079.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.4114274647380307,\n \"acc_stderr\": 0.034055832181383035,\n \"acc_norm\": 0.41611952976407623,\n \"acc_norm_stderr\": 0.03500219697159756,\n \"mc1\": 0.3047735618115055,\n \"mc1_stderr\": 0.016114124156882455,\n \"mc2\": 0.5550965546640495,\n \"mc2_stderr\": 0.016601840091756987\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.35494880546075086,\n \"acc_stderr\": 0.013983036904094095,\n \"acc_norm\": 0.3916382252559727,\n \"acc_norm_stderr\": 0.014264122124938213\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.34266082453694485,\n \"acc_stderr\": 0.004736292355716404,\n \"acc_norm\": 0.46634136626170086,\n \"acc_norm_stderr\": 0.0049784626909669255\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4740740740740741,\n \"acc_stderr\": 0.04313531696750575,\n \"acc_norm\": 0.4740740740740741,\n \"acc_norm_stderr\": 0.04313531696750575\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.4605263157894737,\n \"acc_stderr\": 0.04056242252249034,\n \"acc_norm\": 0.4605263157894737,\n \"acc_norm_stderr\": 0.04056242252249034\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001974,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001974\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.43018867924528303,\n \"acc_stderr\": 0.030471445867183238,\n \"acc_norm\": 0.43018867924528303,\n \"acc_norm_stderr\": 0.030471445867183238\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.3958333333333333,\n \"acc_stderr\": 0.04089465449325582,\n \"acc_norm\": 0.3958333333333333,\n \"acc_norm_stderr\": 0.04089465449325582\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 
0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.3988439306358382,\n \"acc_stderr\": 0.03733626655383509,\n \"acc_norm\": 0.3988439306358382,\n \"acc_norm_stderr\": 0.03733626655383509\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.044405219061793275,\n \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.044405219061793275\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.34893617021276596,\n \"acc_stderr\": 0.031158522131357797,\n \"acc_norm\": 0.34893617021276596,\n \"acc_norm_stderr\": 0.031158522131357797\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3508771929824561,\n \"acc_stderr\": 0.044895393502706986,\n \"acc_norm\": 0.3508771929824561,\n \"acc_norm_stderr\": 0.044895393502706986\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.31724137931034485,\n \"acc_stderr\": 0.038783523721386215,\n \"acc_norm\": 0.31724137931034485,\n \"acc_norm_stderr\": 0.038783523721386215\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.022569897074918428,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.022569897074918428\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.24603174603174602,\n \"acc_stderr\": 0.03852273364924315,\n \"acc_norm\": 0.24603174603174602,\n \"acc_norm_stderr\": 0.03852273364924315\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.4870967741935484,\n \"acc_stderr\": 0.02843453315268186,\n \"acc_norm\": 0.4870967741935484,\n \"acc_norm_stderr\": 0.02843453315268186\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.39408866995073893,\n \"acc_stderr\": 0.034381579670365446,\n \"acc_norm\": 0.39408866995073893,\n \"acc_norm_stderr\": 0.034381579670365446\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.4666666666666667,\n \"acc_stderr\": 0.03895658065271847,\n \"acc_norm\": 0.4666666666666667,\n \"acc_norm_stderr\": 0.03895658065271847\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.4696969696969697,\n \"acc_stderr\": 0.03555804051763929,\n \"acc_norm\": 0.4696969696969697,\n \"acc_norm_stderr\": 0.03555804051763929\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.5077720207253886,\n \"acc_stderr\": 0.036080032255696545,\n \"acc_norm\": 0.5077720207253886,\n \"acc_norm_stderr\": 0.036080032255696545\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n 
\"acc\": 0.3769230769230769,\n \"acc_stderr\": 0.024570975364225995,\n \"acc_norm\": 0.3769230769230769,\n \"acc_norm_stderr\": 0.024570975364225995\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2851851851851852,\n \"acc_stderr\": 0.027528599210340492,\n \"acc_norm\": 0.2851851851851852,\n \"acc_norm_stderr\": 0.027528599210340492\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.42016806722689076,\n \"acc_stderr\": 0.03206183783236152,\n \"acc_norm\": 0.42016806722689076,\n \"acc_norm_stderr\": 0.03206183783236152\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2582781456953642,\n \"acc_stderr\": 0.035737053147634576,\n \"acc_norm\": 0.2582781456953642,\n \"acc_norm_stderr\": 0.035737053147634576\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.4917431192660551,\n \"acc_stderr\": 0.021434399918214338,\n \"acc_norm\": 0.4917431192660551,\n \"acc_norm_stderr\": 0.021434399918214338\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.24537037037037038,\n \"acc_stderr\": 0.029346665094372937,\n \"acc_norm\": 0.24537037037037038,\n \"acc_norm_stderr\": 0.029346665094372937\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.03426712349247271,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.03426712349247271\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.48523206751054854,\n \"acc_stderr\": 0.032533028078777386,\n \"acc_norm\": 0.48523206751054854,\n \"acc_norm_stderr\": 0.032533028078777386\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.452914798206278,\n \"acc_stderr\": 0.033408675019233246,\n \"acc_norm\": 0.452914798206278,\n \"acc_norm_stderr\": 0.033408675019233246\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.45038167938931295,\n \"acc_stderr\": 0.04363643698524779,\n \"acc_norm\": 0.45038167938931295,\n \"acc_norm_stderr\": 0.04363643698524779\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6115702479338843,\n \"acc_stderr\": 0.04449270350068382,\n \"acc_norm\": 0.6115702479338843,\n \"acc_norm_stderr\": 0.04449270350068382\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5462962962962963,\n \"acc_stderr\": 0.04812917324536823,\n \"acc_norm\": 0.5462962962962963,\n \"acc_norm_stderr\": 0.04812917324536823\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.49693251533742333,\n \"acc_stderr\": 0.03928297078179663,\n \"acc_norm\": 0.49693251533742333,\n \"acc_norm_stderr\": 0.03928297078179663\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.33035714285714285,\n \"acc_stderr\": 0.04464285714285714,\n \"acc_norm\": 0.33035714285714285,\n \"acc_norm_stderr\": 0.04464285714285714\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.5048543689320388,\n \"acc_stderr\": 0.04950504382128919,\n \"acc_norm\": 0.5048543689320388,\n \"acc_norm_stderr\": 0.04950504382128919\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.5299145299145299,\n \"acc_stderr\": 0.03269741106812444,\n \"acc_norm\": 0.5299145299145299,\n \"acc_norm_stderr\": 0.03269741106812444\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.5593869731800766,\n \"acc_stderr\": 
0.017753396973908493,\n \"acc_norm\": 0.5593869731800766,\n \"acc_norm_stderr\": 0.017753396973908493\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5289017341040463,\n \"acc_stderr\": 0.026874085883518348,\n \"acc_norm\": 0.5289017341040463,\n \"acc_norm_stderr\": 0.026874085883518348\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.19664804469273742,\n \"acc_stderr\": 0.013293183027454641,\n \"acc_norm\": 0.19664804469273742,\n \"acc_norm_stderr\": 0.013293183027454641\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.4934640522875817,\n \"acc_stderr\": 0.028627470550556054,\n \"acc_norm\": 0.4934640522875817,\n \"acc_norm_stderr\": 0.028627470550556054\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.572347266881029,\n \"acc_stderr\": 0.028099240775809553,\n \"acc_norm\": 0.572347266881029,\n \"acc_norm_stderr\": 0.028099240775809553\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.49074074074074076,\n \"acc_stderr\": 0.027815973433878014,\n \"acc_norm\": 0.49074074074074076,\n \"acc_norm_stderr\": 0.027815973433878014\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2978723404255319,\n \"acc_stderr\": 0.02728160834446942,\n \"acc_norm\": 0.2978723404255319,\n \"acc_norm_stderr\": 0.02728160834446942\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2900912646675359,\n \"acc_stderr\": 0.011590375554733093,\n \"acc_norm\": 0.2900912646675359,\n \"acc_norm_stderr\": 0.011590375554733093\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.2757352941176471,\n \"acc_stderr\": 0.02714627193662517,\n \"acc_norm\": 0.2757352941176471,\n \"acc_norm_stderr\": 0.02714627193662517\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4624183006535948,\n \"acc_stderr\": 0.02017061497496978,\n \"acc_norm\": 0.4624183006535948,\n \"acc_norm_stderr\": 0.02017061497496978\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04789131426105757,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04789131426105757\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.3836734693877551,\n \"acc_stderr\": 0.03113088039623593,\n \"acc_norm\": 0.3836734693877551,\n \"acc_norm_stderr\": 0.03113088039623593\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.5522388059701493,\n \"acc_stderr\": 0.03516184772952167,\n \"acc_norm\": 0.5522388059701493,\n \"acc_norm_stderr\": 0.03516184772952167\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.047258156262526094,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.047258156262526094\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.3614457831325301,\n \"acc_stderr\": 0.037400593820293204,\n \"acc_norm\": 0.3614457831325301,\n \"acc_norm_stderr\": 0.037400593820293204\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.6081871345029239,\n \"acc_stderr\": 0.037439798259264,\n \"acc_norm\": 0.6081871345029239,\n \"acc_norm_stderr\": 0.037439798259264\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3047735618115055,\n \"mc1_stderr\": 0.016114124156882455,\n \"mc2\": 0.5550965546640495,\n \"mc2_stderr\": 0.016601840091756987\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7063930544593529,\n \"acc_stderr\": 0.012799397296204182\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/occultml/Helios-10.7B-v2", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-23-10.136079.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-23-10.136079.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-23-10.136079.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-23-10.136079.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-23-10.136079.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-23-10.136079.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["**/details_harness|winogrande|5_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-23-10.136079.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T12_23_10.136079", "path": ["results_2024-01-04T12-23-10.136079.parquet"]}, {"split": "latest", "path": 
["results_2024-01-04T12-23-10.136079.parquet"]}]}]}
2024-01-04T12:25:59+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of occultml/Helios-10.7B-v2 Dataset automatically created during the evaluation run of model occultml/Helios-10.7B-v2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:23:10.136079 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
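This processed card text mentions loading the details from a run, but the accompanying snippet was stripped during processing; a minimal sketch is given below. It is only a sketch: it assumes the standard `datasets` API and that the details repository follows the leaderboard's usual `details_<org>__<model>` naming (i.e. `open-llm-leaderboard/details_occultml__Helios-10.7B-v2`, which is inferred rather than stated here), and it uses `harness_winogrande_5` as one example of the 63 available configurations.

```python
from datasets import load_dataset

# Assumed repository id, following the leaderboard's details_<org>__<model> convention.
REPO_ID = "open-llm-leaderboard/details_occultml__Helios-10.7B-v2"

# Load one evaluation task; the "latest" split listed in the repo's configs points
# at the most recent run, while timestamped splits hold individual runs.
data = load_dataset(REPO_ID, "harness_winogrande_5", split="latest")
print(data)
```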
[ "# Dataset Card for Evaluation run of occultml/Helios-10.7B-v2\n\n\n\nDataset automatically created during the evaluation run of model occultml/Helios-10.7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:23:10.136079(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of occultml/Helios-10.7B-v2\n\n\n\nDataset automatically created during the evaluation run of model occultml/Helios-10.7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:23:10.136079(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of occultml/Helios-10.7B-v2\n\n\n\nDataset automatically created during the evaluation run of model occultml/Helios-10.7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:23:10.136079(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
1d5c345ef36e39211f4908450ba8dba96ea63d23
# Dataset Card for Evaluation run of samir-fama/FernandoGPT-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [samir-fama/FernandoGPT-v1](https://huggingface.co/samir-fama/FernandoGPT-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_samir-fama__FernandoGPT-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:27:16.928261](https://huggingface.co/datasets/open-llm-leaderboard/details_samir-fama__FernandoGPT-v1/blob/main/results_2024-01-04T12-27-16.928261.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6565157141345797, "acc_stderr": 0.03209595442852185, "acc_norm": 0.6562737683441319, "acc_norm_stderr": 0.03276343682808398, "mc1": 0.4528763769889841, "mc1_stderr": 0.01742558984831402, "mc2": 0.611810271307038, "mc2_stderr": 0.015177040276543659 }, "harness|arc:challenge|25": { "acc": 0.6629692832764505, "acc_stderr": 0.01381347665290227, "acc_norm": 0.6945392491467577, "acc_norm_stderr": 0.01346008047800251 }, "harness|hellaswag|10": { "acc": 0.6838279227245568, "acc_stderr": 0.004640306719628064, "acc_norm": 0.869448317068313, "acc_norm_stderr": 0.003362208481557298 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6592592592592592, "acc_stderr": 0.040943762699967926, "acc_norm": 0.6592592592592592, "acc_norm_stderr": 0.040943762699967926 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7245283018867924, "acc_stderr": 0.02749566368372406, "acc_norm": 0.7245283018867924, "acc_norm_stderr": 0.02749566368372406 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.049406356306056595, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.049406356306056595 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5872340425531914, "acc_stderr": 0.03218471141400351, "acc_norm": 0.5872340425531914, "acc_norm_stderr": 0.03218471141400351 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.43915343915343913, "acc_stderr": 0.025559920550531006, "acc_norm": 0.43915343915343913, "acc_norm_stderr": 0.025559920550531006 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642518, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642518 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4876847290640394, "acc_stderr": 0.035169204442208966, "acc_norm": 0.4876847290640394, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.028869778460267045, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.028869778460267045 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8860103626943006, "acc_stderr": 0.022935144053919436, "acc_norm": 0.8860103626943006, "acc_norm_stderr": 0.022935144053919436 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6743589743589744, "acc_stderr": 0.02375966576741229, "acc_norm": 0.6743589743589744, "acc_norm_stderr": 0.02375966576741229 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35555555555555557, "acc_stderr": 0.029185714949857416, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.029185714949857416 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.030066761582977927, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.030066761582977927 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, "acc_stderr": 
0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8422018348623853, "acc_stderr": 0.015630022970092448, "acc_norm": 0.8422018348623853, "acc_norm_stderr": 0.015630022970092448 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5324074074074074, "acc_stderr": 0.03402801581358966, "acc_norm": 0.5324074074074074, "acc_norm_stderr": 0.03402801581358966 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.025845017986926917, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.025845017986926917 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290902, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290902 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.03641297081313729, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.03641297081313729 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406964, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406964 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.0446196043338474, "acc_norm": 0.73, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8301404853128991, "acc_stderr": 0.013428186370608304, "acc_norm": 0.8301404853128991, "acc_norm_stderr": 0.013428186370608304 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7514450867052023, "acc_stderr": 0.023267528432100174, "acc_norm": 0.7514450867052023, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4201117318435754, "acc_stderr": 0.016507671073256402, "acc_norm": 0.4201117318435754, "acc_norm_stderr": 0.016507671073256402 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.025646863097137894, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.025646863097137894 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7170418006430869, "acc_stderr": 0.02558306248998481, "acc_norm": 0.7170418006430869, "acc_norm_stderr": 0.02558306248998481 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712995, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712995 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.02982074719142248, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.02982074719142248 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46936114732724904, "acc_stderr": 0.012746237711716634, "acc_norm": 0.46936114732724904, "acc_norm_stderr": 0.012746237711716634 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6875, "acc_stderr": 0.02815637344037142, "acc_norm": 0.6875, "acc_norm_stderr": 0.02815637344037142 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6928104575163399, "acc_stderr": 0.018663359671463674, "acc_norm": 0.6928104575163399, "acc_norm_stderr": 0.018663359671463674 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7346938775510204, "acc_stderr": 0.028263889943784593, "acc_norm": 0.7346938775510204, "acc_norm_stderr": 0.028263889943784593 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.025870646766169136, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.025870646766169136 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.85, "acc_stderr": 0.0358870281282637, "acc_norm": 0.85, "acc_norm_stderr": 0.0358870281282637 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.4528763769889841, "mc1_stderr": 0.01742558984831402, "mc2": 0.611810271307038, "mc2_stderr": 0.015177040276543659 }, "harness|winogrande|5": { "acc": 0.8113654301499605, "acc_stderr": 0.010995172318019811 }, "harness|gsm8k|5": { "acc": 0.733131159969674, "acc_stderr": 0.012183780551887955 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
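As a complement to the single-task loading example earlier in this card, the sketch below shows one way to pull the aggregated numbers from the "results" configuration. It is a sketch, not part of the original card: it relies on the `results` config and `latest` split declared in the repository metadata, and the exact column layout of the aggregated parquet file is not documented here, so the printed fields may need adjusting.

```python
from datasets import load_dataset

# The "results" configuration stores the aggregated metrics of each run;
# the "latest" split always points at the most recent evaluation.
results = load_dataset(
    "open-llm-leaderboard/details_samir-fama__FernandoGPT-v1",
    "results",
    split="latest",
)

# Column layout is assumed, not documented here; inspect it before relying on names.
print(results.column_names)
print(results[0])
```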
open-llm-leaderboard/details_samir-fama__FernandoGPT-v1
[ "region:us" ]
2024-01-04T12:29:34+00:00
{"pretty_name": "Evaluation run of samir-fama/FernandoGPT-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [samir-fama/FernandoGPT-v1](https://huggingface.co/samir-fama/FernandoGPT-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_samir-fama__FernandoGPT-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:27:16.928261](https://huggingface.co/datasets/open-llm-leaderboard/details_samir-fama__FernandoGPT-v1/blob/main/results_2024-01-04T12-27-16.928261.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6565157141345797,\n \"acc_stderr\": 0.03209595442852185,\n \"acc_norm\": 0.6562737683441319,\n \"acc_norm_stderr\": 0.03276343682808398,\n \"mc1\": 0.4528763769889841,\n \"mc1_stderr\": 0.01742558984831402,\n \"mc2\": 0.611810271307038,\n \"mc2_stderr\": 0.015177040276543659\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6629692832764505,\n \"acc_stderr\": 0.01381347665290227,\n \"acc_norm\": 0.6945392491467577,\n \"acc_norm_stderr\": 0.01346008047800251\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6838279227245568,\n \"acc_stderr\": 0.004640306719628064,\n \"acc_norm\": 0.869448317068313,\n \"acc_norm_stderr\": 0.003362208481557298\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6592592592592592,\n \"acc_stderr\": 0.040943762699967926,\n \"acc_norm\": 0.6592592592592592,\n \"acc_norm_stderr\": 0.040943762699967926\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7245283018867924,\n \"acc_stderr\": 0.02749566368372406,\n \"acc_norm\": 0.7245283018867924,\n \"acc_norm_stderr\": 0.02749566368372406\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 
0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5872340425531914,\n \"acc_stderr\": 0.03218471141400351,\n \"acc_norm\": 0.5872340425531914,\n \"acc_norm_stderr\": 0.03218471141400351\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.43915343915343913,\n \"acc_stderr\": 0.025559920550531006,\n \"acc_norm\": 0.43915343915343913,\n \"acc_norm_stderr\": 0.025559920550531006\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642518,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642518\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4876847290640394,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.4876847290640394,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.028869778460267045,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.028869778460267045\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.022935144053919436,\n \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919436\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6743589743589744,\n \"acc_stderr\": 0.02375966576741229,\n \"acc_norm\": 0.6743589743589744,\n \"acc_norm_stderr\": 0.02375966576741229\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35555555555555557,\n \"acc_stderr\": 0.029185714949857416,\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.029185714949857416\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6890756302521008,\n \"acc_stderr\": 0.030066761582977927,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.030066761582977927\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8422018348623853,\n \"acc_stderr\": 0.015630022970092448,\n \"acc_norm\": 0.8422018348623853,\n \"acc_norm_stderr\": 0.015630022970092448\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5324074074074074,\n \"acc_stderr\": 0.03402801581358966,\n \"acc_norm\": 0.5324074074074074,\n \"acc_norm_stderr\": 0.03402801581358966\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.025845017986926917,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.025845017986926917\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290902,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290902\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.03641297081313729,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.03641297081313729\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406964,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406964\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8301404853128991,\n \"acc_stderr\": 0.013428186370608304,\n \"acc_norm\": 0.8301404853128991,\n \"acc_norm_stderr\": 0.013428186370608304\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7514450867052023,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.7514450867052023,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4201117318435754,\n \"acc_stderr\": 0.016507671073256402,\n \"acc_norm\": 0.4201117318435754,\n \"acc_norm_stderr\": 0.016507671073256402\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137894,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137894\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7170418006430869,\n \"acc_stderr\": 0.02558306248998481,\n \"acc_norm\": 0.7170418006430869,\n \"acc_norm_stderr\": 0.02558306248998481\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712995,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712995\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.02982074719142248,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.02982074719142248\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46936114732724904,\n \"acc_stderr\": 0.012746237711716634,\n \"acc_norm\": 0.46936114732724904,\n \"acc_norm_stderr\": 0.012746237711716634\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6875,\n \"acc_stderr\": 0.02815637344037142,\n \"acc_norm\": 0.6875,\n \"acc_norm_stderr\": 0.02815637344037142\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6928104575163399,\n \"acc_stderr\": 0.018663359671463674,\n \"acc_norm\": 0.6928104575163399,\n \"acc_norm_stderr\": 0.018663359671463674\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.028263889943784593,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.028263889943784593\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169136,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169136\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.85,\n \"acc_stderr\": 0.0358870281282637,\n \"acc_norm\": 0.85,\n \"acc_norm_stderr\": 0.0358870281282637\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4528763769889841,\n \"mc1_stderr\": 0.01742558984831402,\n \"mc2\": 0.611810271307038,\n \"mc2_stderr\": 0.015177040276543659\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8113654301499605,\n \"acc_stderr\": 0.010995172318019811\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.733131159969674,\n \"acc_stderr\": 0.012183780551887955\n }\n}\n```", "repo_url": 
"https://huggingface.co/samir-fama/FernandoGPT-v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-27-16.928261.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-27-16.928261.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-27-16.928261.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-27-16.928261.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-27-16.928261.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_27_16.928261", "path": ["**/details_harness|winogrande|5_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-27-16.928261.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T12_27_16.928261", "path": ["results_2024-01-04T12-27-16.928261.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T12-27-16.928261.parquet"]}]}]}
2024-01-04T12:30:02+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of samir-fama/FernandoGPT-v1 Dataset automatically created during the evaluation run of model samir-fama/FernandoGPT-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:27:16.928261 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
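For reference, a minimal sketch of the loading call referred to above (the original code block is not reproduced in this processed text). The repository id is an assumption based on the usual open-llm-leaderboard naming convention (details_<org>__<model>) for this model; the "harness_winogrande_5" config name is taken from the configs listed in this record's metadata.

```python
from datasets import load_dataset

# Assumed details repository for samir-fama/FernandoGPT-v1, following the
# open-llm-leaderboard convention details_<org>__<model>.
data = load_dataset(
    "open-llm-leaderboard/details_samir-fama__FernandoGPT-v1",
    "harness_winogrande_5",
    split="train",
)
print(data)
```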
[ "# Dataset Card for Evaluation run of samir-fama/FernandoGPT-v1\n\n\n\nDataset automatically created during the evaluation run of model samir-fama/FernandoGPT-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:27:16.928261(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of samir-fama/FernandoGPT-v1\n\n\n\nDataset automatically created during the evaluation run of model samir-fama/FernandoGPT-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:27:16.928261(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of samir-fama/FernandoGPT-v1\n\n\n\nDataset automatically created during the evaluation run of model samir-fama/FernandoGPT-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:27:16.928261(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
6c4aad7c7eeeca729e1b0f1a7de7a5310e8a6284
# Dataset Card for Evaluation run of shadowml/Marcoro14-7B-ties <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [shadowml/Marcoro14-7B-ties](https://huggingface.co/shadowml/Marcoro14-7B-ties) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_shadowml__Marcoro14-7B-ties", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:27:34.207089](https://huggingface.co/datasets/open-llm-leaderboard/details_shadowml__Marcoro14-7B-ties/blob/main/results_2024-01-04T12-27-34.207089.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6557670960374431, "acc_stderr": 0.031998348451839013, "acc_norm": 0.6555797586821419, "acc_norm_stderr": 0.032660366522478446, "mc1": 0.4724602203182375, "mc1_stderr": 0.017476930190712187, "mc2": 0.6354053076486196, "mc2_stderr": 0.015212905778062237 }, "harness|arc:challenge|25": { "acc": 0.6749146757679181, "acc_stderr": 0.013688147309729125, "acc_norm": 0.6979522184300341, "acc_norm_stderr": 0.01341751914471641 }, "harness|hellaswag|10": { "acc": 0.6919936267675761, "acc_stderr": 0.004607256752931883, "acc_norm": 0.8713403704441346, "acc_norm_stderr": 0.003341385493187586 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.33, "acc_stderr": 0.04725815626252605, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252605 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6666666666666666, "acc_stderr": 0.04072314811876837, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04072314811876837 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.037385206761196686, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.037385206761196686 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7056603773584905, "acc_stderr": 0.02804918631569525, "acc_norm": 0.7056603773584905, "acc_norm_stderr": 0.02804918631569525 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6473988439306358, "acc_stderr": 0.03643037168958548, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.03643037168958548 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287534, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287534 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816508, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816508 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6042553191489362, "acc_stderr": 0.031967586978353627, "acc_norm": 0.6042553191489362, "acc_norm_stderr": 0.031967586978353627 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5379310344827586, "acc_stderr": 0.04154659671707548, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.04154659671707548 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.43386243386243384, "acc_stderr": 0.025525034382474887, "acc_norm": 0.43386243386243384, "acc_norm_stderr": 0.025525034382474887 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7838709677419354, "acc_stderr": 0.023415293433568525, "acc_norm": 0.7838709677419354, "acc_norm_stderr": 0.023415293433568525 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.49261083743842365, "acc_stderr": 0.03517603540361008, "acc_norm": 0.49261083743842365, "acc_norm_stderr": 0.03517603540361008 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7828282828282829, "acc_stderr": 0.029376616484945633, "acc_norm": 0.7828282828282829, "acc_norm_stderr": 0.029376616484945633 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402538, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402538 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35555555555555557, "acc_stderr": 0.029185714949857413, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.029185714949857413 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6764705882352942, "acc_stderr": 0.030388353551886793, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.030388353551886793 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 
0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8532110091743119, "acc_stderr": 0.01517314184512625, "acc_norm": 0.8532110091743119, "acc_norm_stderr": 0.01517314184512625 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5185185185185185, "acc_stderr": 0.034076320938540516, "acc_norm": 0.5185185185185185, "acc_norm_stderr": 0.034076320938540516 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.02552472232455335, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.02552472232455335 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.025530100460233494, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.025530100460233494 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6905829596412556, "acc_stderr": 0.03102441174057221, "acc_norm": 0.6905829596412556, "acc_norm_stderr": 0.03102441174057221 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8015267175572519, "acc_stderr": 0.034981493854624734, "acc_norm": 0.8015267175572519, "acc_norm_stderr": 0.034981493854624734 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7668711656441718, "acc_stderr": 0.0332201579577674, "acc_norm": 0.7668711656441718, "acc_norm_stderr": 0.0332201579577674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.44642857142857145, "acc_stderr": 0.04718471485219588, "acc_norm": 0.44642857142857145, "acc_norm_stderr": 0.04718471485219588 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8888888888888888, "acc_stderr": 0.020588491316092375, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.020588491316092375 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8365261813537676, "acc_stderr": 0.013223928616741622, "acc_norm": 0.8365261813537676, "acc_norm_stderr": 0.013223928616741622 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7543352601156069, "acc_stderr": 0.023176298203992005, "acc_norm": 0.7543352601156069, "acc_norm_stderr": 0.023176298203992005 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.423463687150838, "acc_stderr": 0.016525425898773493, "acc_norm": 0.423463687150838, "acc_norm_stderr": 0.016525425898773493 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7320261437908496, "acc_stderr": 0.025360603796242557, "acc_norm": 0.7320261437908496, "acc_norm_stderr": 0.025360603796242557 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7138263665594855, "acc_stderr": 0.025670259242188933, "acc_norm": 0.7138263665594855, "acc_norm_stderr": 0.025670259242188933 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7623456790123457, "acc_stderr": 0.02368359183700856, "acc_norm": 0.7623456790123457, "acc_norm_stderr": 0.02368359183700856 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.5, "acc_stderr": 0.029827499313594685, "acc_norm": 0.5, "acc_norm_stderr": 0.029827499313594685 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46870925684485004, "acc_stderr": 0.012745204626083135, "acc_norm": 0.46870925684485004, "acc_norm_stderr": 0.012745204626083135 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6727941176470589, "acc_stderr": 0.028501452860396556, "acc_norm": 0.6727941176470589, "acc_norm_stderr": 0.028501452860396556 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.684640522875817, "acc_stderr": 0.01879808628488689, "acc_norm": 0.684640522875817, "acc_norm_stderr": 0.01879808628488689 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.028123429335142777, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.028123429335142777 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.025870646766169136, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.025870646766169136 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774708, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774708 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.4724602203182375, "mc1_stderr": 0.017476930190712187, "mc2": 0.6354053076486196, "mc2_stderr": 0.015212905778062237 }, "harness|winogrande|5": { "acc": 0.8161010260457774, "acc_stderr": 0.01088791601330589 }, "harness|gsm8k|5": { "acc": 0.7088703563305534, "acc_stderr": 0.012513215297888463 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
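As a complement to the loading snippet shown in the card above, the sketch below illustrates one way to pull the aggregated "results" configuration that the card mentions. It is only illustrative: the repository id comes from the card's own example, while the "results" config name and the "latest" split name are assumptions based on the conventions used by the sibling details repositories in this collection.

```python
from datasets import load_dataset

# Hypothetical usage: fetch the aggregated metrics of the most recent run.
# The "results" config and the "latest" split follow the naming conventions
# seen in the other open-llm-leaderboard details repositories.
aggregated = load_dataset(
    "open-llm-leaderboard/details_shadowml__Marcoro14-7B-ties",
    "results",
    split="latest",
)
print(aggregated)
```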
open-llm-leaderboard/details_shadowml__Marcoro14-7B-ties
[ "region:us" ]
2024-01-04T12:29:51+00:00
{"pretty_name": "Evaluation run of shadowml/Marcoro14-7B-ties", "dataset_summary": "Dataset automatically created during the evaluation run of model [shadowml/Marcoro14-7B-ties](https://huggingface.co/shadowml/Marcoro14-7B-ties) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_shadowml__Marcoro14-7B-ties\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:27:34.207089](https://huggingface.co/datasets/open-llm-leaderboard/details_shadowml__Marcoro14-7B-ties/blob/main/results_2024-01-04T12-27-34.207089.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6557670960374431,\n \"acc_stderr\": 0.031998348451839013,\n \"acc_norm\": 0.6555797586821419,\n \"acc_norm_stderr\": 0.032660366522478446,\n \"mc1\": 0.4724602203182375,\n \"mc1_stderr\": 0.017476930190712187,\n \"mc2\": 0.6354053076486196,\n \"mc2_stderr\": 0.015212905778062237\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6749146757679181,\n \"acc_stderr\": 0.013688147309729125,\n \"acc_norm\": 0.6979522184300341,\n \"acc_norm_stderr\": 0.01341751914471641\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6919936267675761,\n \"acc_stderr\": 0.004607256752931883,\n \"acc_norm\": 0.8713403704441346,\n \"acc_norm_stderr\": 0.003341385493187586\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252605,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252605\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.04072314811876837,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.04072314811876837\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.037385206761196686,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.037385206761196686\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7056603773584905,\n \"acc_stderr\": 0.02804918631569525,\n \"acc_norm\": 0.7056603773584905,\n \"acc_norm_stderr\": 0.02804918631569525\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 
0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.03643037168958548,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.03643037168958548\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287534,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287534\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816508,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816508\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6042553191489362,\n \"acc_stderr\": 0.031967586978353627,\n \"acc_norm\": 0.6042553191489362,\n \"acc_norm_stderr\": 0.031967586978353627\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5379310344827586,\n \"acc_stderr\": 0.04154659671707548,\n \"acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.04154659671707548\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.43386243386243384,\n \"acc_stderr\": 0.025525034382474887,\n \"acc_norm\": 0.43386243386243384,\n \"acc_norm_stderr\": 0.025525034382474887\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.023415293433568525,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.023415293433568525\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.49261083743842365,\n \"acc_stderr\": 0.03517603540361008,\n \"acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.03517603540361008\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7828282828282829,\n \"acc_stderr\": 0.029376616484945633,\n \"acc_norm\": 0.7828282828282829,\n \"acc_norm_stderr\": 0.029376616484945633\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.023901157979402538,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402538\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35555555555555557,\n \"acc_stderr\": 0.029185714949857413,\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.029185714949857413\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.030388353551886793,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.030388353551886793\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8532110091743119,\n \"acc_stderr\": 0.01517314184512625,\n \"acc_norm\": 0.8532110091743119,\n \"acc_norm_stderr\": 0.01517314184512625\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5185185185185185,\n \"acc_stderr\": 0.034076320938540516,\n \"acc_norm\": 0.5185185185185185,\n \"acc_norm_stderr\": 0.034076320938540516\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.02552472232455335,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.02552472232455335\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.025530100460233494,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.025530100460233494\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6905829596412556,\n \"acc_stderr\": 0.03102441174057221,\n \"acc_norm\": 0.6905829596412556,\n \"acc_norm_stderr\": 0.03102441174057221\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.034981493854624734,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.034981493854624734\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.44642857142857145,\n \"acc_stderr\": 0.04718471485219588,\n \"acc_norm\": 0.44642857142857145,\n \"acc_norm_stderr\": 0.04718471485219588\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.020588491316092375,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.020588491316092375\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8365261813537676,\n \"acc_stderr\": 0.013223928616741622,\n \"acc_norm\": 0.8365261813537676,\n \"acc_norm_stderr\": 0.013223928616741622\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7543352601156069,\n \"acc_stderr\": 0.023176298203992005,\n \"acc_norm\": 0.7543352601156069,\n \"acc_norm_stderr\": 0.023176298203992005\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.423463687150838,\n \"acc_stderr\": 0.016525425898773493,\n \"acc_norm\": 0.423463687150838,\n \"acc_norm_stderr\": 0.016525425898773493\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7320261437908496,\n \"acc_stderr\": 0.025360603796242557,\n \"acc_norm\": 0.7320261437908496,\n \"acc_norm_stderr\": 0.025360603796242557\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7138263665594855,\n \"acc_stderr\": 0.025670259242188933,\n \"acc_norm\": 0.7138263665594855,\n \"acc_norm_stderr\": 0.025670259242188933\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7623456790123457,\n \"acc_stderr\": 0.02368359183700856,\n \"acc_norm\": 0.7623456790123457,\n \"acc_norm_stderr\": 0.02368359183700856\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.029827499313594685,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.029827499313594685\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46870925684485004,\n \"acc_stderr\": 0.012745204626083135,\n \"acc_norm\": 0.46870925684485004,\n \"acc_norm_stderr\": 0.012745204626083135\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6727941176470589,\n \"acc_stderr\": 0.028501452860396556,\n \"acc_norm\": 0.6727941176470589,\n \"acc_norm_stderr\": 0.028501452860396556\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.684640522875817,\n \"acc_stderr\": 0.01879808628488689,\n \"acc_norm\": 0.684640522875817,\n \"acc_norm_stderr\": 0.01879808628488689\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142777,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142777\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.025870646766169136,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.025870646766169136\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774708,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774708\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4724602203182375,\n \"mc1_stderr\": 0.017476930190712187,\n \"mc2\": 0.6354053076486196,\n \"mc2_stderr\": 0.015212905778062237\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8161010260457774,\n \"acc_stderr\": 0.01088791601330589\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7088703563305534,\n \"acc_stderr\": 0.012513215297888463\n }\n}\n```", "repo_url": 
"https://huggingface.co/shadowml/Marcoro14-7B-ties", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-27-34.207089.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-27-34.207089.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-27-34.207089.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-27-34.207089.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-27-34.207089.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_27_34.207089", "path": ["**/details_harness|winogrande|5_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-27-34.207089.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T12_27_34.207089", "path": ["results_2024-01-04T12-27-34.207089.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T12-27-34.207089.parquet"]}]}]}
2024-01-04T12:30:19+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of shadowml/Marcoro14-7B-ties Dataset automatically created during the evaluation run of model shadowml/Marcoro14-7B-ties on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:27:34.207089(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of shadowml/Marcoro14-7B-ties\n\n\n\nDataset automatically created during the evaluation run of model shadowml/Marcoro14-7B-ties on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:27:34.207089(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of shadowml/Marcoro14-7B-ties\n\n\n\nDataset automatically created during the evaluation run of model shadowml/Marcoro14-7B-ties on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:27:34.207089(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 181, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of shadowml/Marcoro14-7B-ties\n\n\n\nDataset automatically created during the evaluation run of model shadowml/Marcoro14-7B-ties on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:27:34.207089(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
2fecfc7dbecb951c7fc27a97d6c74e890f8fa28c
# Dataset Card for Evaluation run of martyn/solar-megamerge-dare-10.7b-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [martyn/solar-megamerge-dare-10.7b-v1](https://huggingface.co/martyn/solar-megamerge-dare-10.7b-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_martyn__solar-megamerge-dare-10.7b-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:28:22.950465](https://huggingface.co/datasets/open-llm-leaderboard/details_martyn__solar-megamerge-dare-10.7b-v1/blob/main/results_2024-01-04T12-28-22.950465.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6610634231695184, "acc_stderr": 0.031657358174671586, "acc_norm": 0.6635919799924697, "acc_norm_stderr": 0.03229437004691903, "mc1": 0.386780905752754, "mc1_stderr": 0.01704885701051511, "mc2": 0.5433095073342544, "mc2_stderr": 0.015460055514713956 }, "harness|arc:challenge|25": { "acc": 0.6168941979522184, "acc_stderr": 0.014206472661672876, "acc_norm": 0.6612627986348123, "acc_norm_stderr": 0.013830568927974332 }, "harness|hellaswag|10": { "acc": 0.6608245369448317, "acc_stderr": 0.004724619193427587, "acc_norm": 0.8530173272256523, "acc_norm_stderr": 0.0035336498517284792 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7697368421052632, "acc_stderr": 0.03426059424403165, "acc_norm": 0.7697368421052632, "acc_norm_stderr": 0.03426059424403165 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.74, "acc_stderr": 0.0440844002276808, "acc_norm": 0.74, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6716981132075471, "acc_stderr": 0.02890159361241178, "acc_norm": 0.6716981132075471, "acc_norm_stderr": 0.02890159361241178 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.0358687928008034, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.0358687928008034 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55,
"acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6473988439306358, "acc_stderr": 0.03643037168958548, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.03643037168958548 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3627450980392157, "acc_stderr": 0.04784060704105654, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.04784060704105654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.04292346959909281, "acc_norm": 0.76, "acc_norm_stderr": 0.04292346959909281 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5914893617021276, "acc_stderr": 0.032134180267015755, "acc_norm": 0.5914893617021276, "acc_norm_stderr": 0.032134180267015755 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6068965517241379, "acc_stderr": 0.0407032901370707, "acc_norm": 0.6068965517241379, "acc_norm_stderr": 0.0407032901370707 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.47883597883597884, "acc_stderr": 0.025728230952130733, "acc_norm": 0.47883597883597884, "acc_norm_stderr": 0.025728230952130733 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.043902592653775614, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.043902592653775614 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7838709677419354, "acc_stderr": 0.023415293433568532, "acc_norm": 0.7838709677419354, "acc_norm_stderr": 0.023415293433568532 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.49261083743842365, "acc_stderr": 0.03517603540361008, "acc_norm": 0.49261083743842365, "acc_norm_stderr": 0.03517603540361008 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8181818181818182, "acc_stderr": 0.03011768892950357, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.03011768892950357 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8535353535353535, "acc_stderr": 0.025190921114603915, "acc_norm": 0.8535353535353535, "acc_norm_stderr": 0.025190921114603915 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.917098445595855, "acc_stderr": 0.01989934131572178, "acc_norm": 0.917098445595855, "acc_norm_stderr": 0.01989934131572178 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6153846153846154, "acc_stderr": 0.02466674491518721, "acc_norm": 0.6153846153846154, "acc_norm_stderr": 0.02466674491518721 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37777777777777777, "acc_stderr": 0.029560707392465718, "acc_norm": 0.37777777777777777, "acc_norm_stderr": 0.029560707392465718 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6974789915966386, "acc_stderr": 0.029837962388291932, "acc_norm": 0.6974789915966386, "acc_norm_stderr": 0.029837962388291932 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.39072847682119205, 
"acc_stderr": 0.03983798306659807, "acc_norm": 0.39072847682119205, "acc_norm_stderr": 0.03983798306659807 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8385321100917431, "acc_stderr": 0.015776239256163248, "acc_norm": 0.8385321100917431, "acc_norm_stderr": 0.015776239256163248 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5648148148148148, "acc_stderr": 0.03381200005643526, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.03381200005643526 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.025195658428931792, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.025195658428931792 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8523206751054853, "acc_stderr": 0.0230943295825957, "acc_norm": 0.8523206751054853, "acc_norm_stderr": 0.0230943295825957 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7174887892376681, "acc_stderr": 0.03021683101150878, "acc_norm": 0.7174887892376681, "acc_norm_stderr": 0.03021683101150878 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.732824427480916, "acc_stderr": 0.03880848301082396, "acc_norm": 0.732824427480916, "acc_norm_stderr": 0.03880848301082396 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098824, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098824 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.03957835471980981, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.03957835471980981 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.04058042015646034, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.04058042015646034 }, "harness|hendrycksTest-marketing|5": { "acc": 0.905982905982906, "acc_stderr": 0.019119892798924974, "acc_norm": 0.905982905982906, "acc_norm_stderr": 0.019119892798924974 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8250319284802043, "acc_stderr": 0.013586619219903348, "acc_norm": 0.8250319284802043, "acc_norm_stderr": 0.013586619219903348 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7254335260115607, "acc_stderr": 0.02402774515526502, "acc_norm": 0.7254335260115607, "acc_norm_stderr": 0.02402774515526502 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4223463687150838, "acc_stderr": 0.016519594275297117, "acc_norm": 0.4223463687150838, "acc_norm_stderr": 0.016519594275297117 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7287581699346405, "acc_stderr": 0.025457756696667874, "acc_norm": 0.7287581699346405, "acc_norm_stderr": 0.025457756696667874 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.707395498392283, "acc_stderr": 0.02583989833487798, "acc_norm": 0.707395498392283, "acc_norm_stderr": 0.02583989833487798 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7654320987654321, "acc_stderr": 0.023576881744005705, "acc_norm": 0.7654320987654321, "acc_norm_stderr": 0.023576881744005705 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.5106382978723404, "acc_stderr": 0.02982074719142244, "acc_norm": 0.5106382978723404, "acc_norm_stderr": 0.02982074719142244 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4876140808344198, "acc_stderr": 0.012766317315473556, "acc_norm": 0.4876140808344198, "acc_norm_stderr": 0.012766317315473556 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7205882352941176, "acc_stderr": 0.02725720260611494, "acc_norm": 0.7205882352941176, "acc_norm_stderr": 0.02725720260611494 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.7107843137254902, "acc_stderr": 0.018342529845275915, "acc_norm": 0.7107843137254902, "acc_norm_stderr": 0.018342529845275915 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7454545454545455, "acc_stderr": 0.04172343038705383, "acc_norm": 0.7454545454545455, "acc_norm_stderr": 0.04172343038705383 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7836734693877551, "acc_stderr": 0.026358916334904028, "acc_norm": 0.7836734693877551, "acc_norm_stderr": 0.026358916334904028 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578337, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578337 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352203, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352203 }, "harness|hendrycksTest-virology|5": { "acc": 0.5662650602409639, "acc_stderr": 0.03858158940685515, "acc_norm": 0.5662650602409639, "acc_norm_stderr": 0.03858158940685515 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8070175438596491, "acc_stderr": 0.030267457554898458, "acc_norm": 0.8070175438596491, "acc_norm_stderr": 0.030267457554898458 }, "harness|truthfulqa:mc|0": { "mc1": 0.386780905752754, "mc1_stderr": 0.01704885701051511, "mc2": 0.5433095073342544, "mc2_stderr": 0.015460055514713956 }, "harness|winogrande|5": { "acc": 0.829518547750592, "acc_stderr": 0.010569021122825905 }, "harness|gsm8k|5": { "acc": 0.5799848369977255, "acc_stderr": 0.013595121688520485 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
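For instance, a minimal sketch (assuming only the standard `datasets` library is installed) that loads the aggregated "results" configuration and the "latest" split named in this card's metadata, alongside one of the per-task `harness_*` configurations:

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_martyn__solar-megamerge-dare-10.7b-v1"

# Aggregated metrics of the run; the "latest" split always points to the
# most recent evaluation (here 2024-01-04T12:28:22.950465).
results = load_dataset(REPO, "results", split="latest")

# Per-task details live in the harness_* configurations, e.g. GSM8K with 5-shot prompts.
gsm8k_details = load_dataset(REPO, "harness_gsm8k_5", split="latest")

print(results)
print(gsm8k_details)
```

Any of the other `harness_*` configuration names listed in the metadata can be substituted in the same way.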
open-llm-leaderboard/details_martyn__solar-megamerge-dare-10.7b-v1
[ "region:us" ]
2024-01-04T12:30:43+00:00
{"pretty_name": "Evaluation run of martyn/solar-megamerge-dare-10.7b-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [martyn/solar-megamerge-dare-10.7b-v1](https://huggingface.co/martyn/solar-megamerge-dare-10.7b-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_martyn__solar-megamerge-dare-10.7b-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:28:22.950465](https://huggingface.co/datasets/open-llm-leaderboard/details_martyn__solar-megamerge-dare-10.7b-v1/blob/main/results_2024-01-04T12-28-22.950465.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6610634231695184,\n \"acc_stderr\": 0.031657358174671586,\n \"acc_norm\": 0.6635919799924697,\n \"acc_norm_stderr\": 0.03229437004691903,\n \"mc1\": 0.386780905752754,\n \"mc1_stderr\": 0.01704885701051511,\n \"mc2\": 0.5433095073342544,\n \"mc2_stderr\": 0.015460055514713956\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6168941979522184,\n \"acc_stderr\": 0.014206472661672876,\n \"acc_norm\": 0.6612627986348123,\n \"acc_norm_stderr\": 0.013830568927974332\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6608245369448317,\n \"acc_stderr\": 0.004724619193427587,\n \"acc_norm\": 0.8530173272256523,\n \"acc_norm_stderr\": 0.0035336498517284792\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7697368421052632,\n \"acc_stderr\": 0.03426059424403165,\n \"acc_norm\": 0.7697368421052632,\n \"acc_norm_stderr\": 0.03426059424403165\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6716981132075471,\n \"acc_stderr\": 0.02890159361241178,\n \"acc_norm\": 0.6716981132075471,\n \"acc_norm_stderr\": 0.02890159361241178\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.0358687928008034,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.0358687928008034\n },\n \"harness|hendrycksTest-college_chemistry|5\": 
{\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.03643037168958548,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.03643037168958548\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.04784060704105654,\n \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.04784060704105654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.04292346959909281,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.04292346959909281\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5914893617021276,\n \"acc_stderr\": 0.032134180267015755,\n \"acc_norm\": 0.5914893617021276,\n \"acc_norm_stderr\": 0.032134180267015755\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6068965517241379,\n \"acc_stderr\": 0.0407032901370707,\n \"acc_norm\": 0.6068965517241379,\n \"acc_norm_stderr\": 0.0407032901370707\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.47883597883597884,\n \"acc_stderr\": 0.025728230952130733,\n \"acc_norm\": 0.47883597883597884,\n \"acc_norm_stderr\": 0.025728230952130733\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n \"acc_stderr\": 0.043902592653775614,\n \"acc_norm\": 0.40476190476190477,\n \"acc_norm_stderr\": 0.043902592653775614\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.023415293433568532,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.023415293433568532\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.49261083743842365,\n \"acc_stderr\": 0.03517603540361008,\n \"acc_norm\": 0.49261083743842365,\n \"acc_norm_stderr\": 0.03517603540361008\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.03011768892950357,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.03011768892950357\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8535353535353535,\n \"acc_stderr\": 0.025190921114603915,\n \"acc_norm\": 0.8535353535353535,\n \"acc_norm_stderr\": 0.025190921114603915\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.917098445595855,\n \"acc_stderr\": 0.01989934131572178,\n \"acc_norm\": 0.917098445595855,\n \"acc_norm_stderr\": 0.01989934131572178\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6153846153846154,\n \"acc_stderr\": 0.02466674491518721,\n \"acc_norm\": 0.6153846153846154,\n \"acc_norm_stderr\": 0.02466674491518721\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37777777777777777,\n \"acc_stderr\": 0.029560707392465718,\n \"acc_norm\": 0.37777777777777777,\n \"acc_norm_stderr\": 0.029560707392465718\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6974789915966386,\n \"acc_stderr\": 0.029837962388291932,\n \"acc_norm\": 0.6974789915966386,\n \"acc_norm_stderr\": 0.029837962388291932\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.39072847682119205,\n \"acc_stderr\": 0.03983798306659807,\n \"acc_norm\": 0.39072847682119205,\n \"acc_norm_stderr\": 0.03983798306659807\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8385321100917431,\n \"acc_stderr\": 0.015776239256163248,\n \"acc_norm\": 0.8385321100917431,\n \"acc_norm_stderr\": 0.015776239256163248\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.03381200005643526,\n \"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.03381200005643526\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.025195658428931792,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.025195658428931792\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8523206751054853,\n \"acc_stderr\": 0.0230943295825957,\n \"acc_norm\": 0.8523206751054853,\n \"acc_norm_stderr\": 0.0230943295825957\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7174887892376681,\n \"acc_stderr\": 0.03021683101150878,\n \"acc_norm\": 0.7174887892376681,\n \"acc_norm_stderr\": 0.03021683101150878\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.732824427480916,\n \"acc_stderr\": 0.03880848301082396,\n \"acc_norm\": 0.732824427480916,\n \"acc_norm_stderr\": 0.03880848301082396\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098824,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098824\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.03957835471980981,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.03957835471980981\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.04058042015646034,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.04058042015646034\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.905982905982906,\n \"acc_stderr\": 0.019119892798924974,\n \"acc_norm\": 0.905982905982906,\n \"acc_norm_stderr\": 0.019119892798924974\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8250319284802043,\n \"acc_stderr\": 0.013586619219903348,\n \"acc_norm\": 0.8250319284802043,\n \"acc_norm_stderr\": 0.013586619219903348\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7254335260115607,\n \"acc_stderr\": 0.02402774515526502,\n \"acc_norm\": 0.7254335260115607,\n \"acc_norm_stderr\": 0.02402774515526502\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4223463687150838,\n \"acc_stderr\": 0.016519594275297117,\n \"acc_norm\": 0.4223463687150838,\n \"acc_norm_stderr\": 0.016519594275297117\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7287581699346405,\n \"acc_stderr\": 0.025457756696667874,\n \"acc_norm\": 0.7287581699346405,\n \"acc_norm_stderr\": 0.025457756696667874\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.707395498392283,\n \"acc_stderr\": 0.02583989833487798,\n \"acc_norm\": 0.707395498392283,\n \"acc_norm_stderr\": 0.02583989833487798\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7654320987654321,\n \"acc_stderr\": 0.023576881744005705,\n \"acc_norm\": 0.7654320987654321,\n \"acc_norm_stderr\": 0.023576881744005705\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5106382978723404,\n \"acc_stderr\": 0.02982074719142244,\n \"acc_norm\": 0.5106382978723404,\n \"acc_norm_stderr\": 0.02982074719142244\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4876140808344198,\n \"acc_stderr\": 0.012766317315473556,\n \"acc_norm\": 0.4876140808344198,\n \"acc_norm_stderr\": 0.012766317315473556\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7205882352941176,\n \"acc_stderr\": 0.02725720260611494,\n \"acc_norm\": 0.7205882352941176,\n \"acc_norm_stderr\": 0.02725720260611494\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.7107843137254902,\n \"acc_stderr\": 0.018342529845275915,\n \"acc_norm\": 0.7107843137254902,\n \"acc_norm_stderr\": 0.018342529845275915\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7454545454545455,\n \"acc_stderr\": 0.04172343038705383,\n \"acc_norm\": 0.7454545454545455,\n \"acc_norm_stderr\": 0.04172343038705383\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7836734693877551,\n \"acc_stderr\": 0.026358916334904028,\n \"acc_norm\": 0.7836734693877551,\n \"acc_norm_stderr\": 0.026358916334904028\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578337,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578337\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352203,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352203\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n \"acc_stderr\": 0.03858158940685515,\n \"acc_norm\": 0.5662650602409639,\n \"acc_norm_stderr\": 0.03858158940685515\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8070175438596491,\n \"acc_stderr\": 0.030267457554898458,\n \"acc_norm\": 0.8070175438596491,\n \"acc_norm_stderr\": 0.030267457554898458\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.386780905752754,\n \"mc1_stderr\": 0.01704885701051511,\n \"mc2\": 0.5433095073342544,\n \"mc2_stderr\": 0.015460055514713956\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.829518547750592,\n \"acc_stderr\": 0.010569021122825905\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5799848369977255,\n \"acc_stderr\": 0.013595121688520485\n 
}\n}\n```", "repo_url": "https://huggingface.co/martyn/solar-megamerge-dare-10.7b-v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-28-22.950465.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-28-22.950465.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-28-22.950465.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-28-22.950465.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-28-22.950465.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_28_22.950465", "path": ["**/details_harness|winogrande|5_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-28-22.950465.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T12_28_22.950465", "path": ["results_2024-01-04T12-28-22.950465.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T12-28-22.950465.parquet"]}]}]}
2024-01-04T12:31:06+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of martyn/solar-megamerge-dare-10.7b-v1 Dataset automatically created during the evaluation run of model martyn/solar-megamerge-dare-10.7b-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:28:22.950465(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of martyn/solar-megamerge-dare-10.7b-v1\n\n\n\nDataset automatically created during the evaluation run of model martyn/solar-megamerge-dare-10.7b-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:28:22.950465(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of martyn/solar-megamerge-dare-10.7b-v1\n\n\n\nDataset automatically created during the evaluation run of model martyn/solar-megamerge-dare-10.7b-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:28:22.950465(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 197, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of martyn/solar-megamerge-dare-10.7b-v1\n\n\n\nDataset automatically created during the evaluation run of model martyn/solar-megamerge-dare-10.7b-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:28:22.950465(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
0cbe275f59cfb01c3f4ecf6c86d7d80092a0881a
# Dataset Card for Evaluation run of chargoddard/SmolLlamix-8x101M <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [chargoddard/SmolLlamix-8x101M](https://huggingface.co/chargoddard/SmolLlamix-8x101M) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_chargoddard__SmolLlamix-8x101M", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:29:56.794531](https://huggingface.co/datasets/open-llm-leaderboard/details_chargoddard__SmolLlamix-8x101M/blob/main/results_2024-01-04T12-29-56.794531.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.24665141008843472, "acc_stderr": 0.030422170490043785, "acc_norm": 0.24716769398389823, "acc_norm_stderr": 0.031197299482121136, "mc1": 0.26193390452876375, "mc1_stderr": 0.015392118805015021, "mc2": 0.4608972262894305, "mc2_stderr": 0.015343271963572871 }, "harness|arc:challenge|25": { "acc": 0.17918088737201365, "acc_stderr": 0.011207045216615667, "acc_norm": 0.22696245733788395, "acc_norm_stderr": 0.012240491536132866 }, "harness|hellaswag|10": { "acc": 0.2765385381398128, "acc_stderr": 0.0044637210713190986, "acc_norm": 0.28500298745269864, "acc_norm_stderr": 0.004504932999736393 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.26666666666666666, "acc_stderr": 0.03820169914517904, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.03820169914517904 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.18421052631578946, "acc_stderr": 0.0315469804508223, "acc_norm": 0.18421052631578946, "acc_norm_stderr": 0.0315469804508223 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.22, "acc_stderr": 0.04163331998932268, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932268 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.22264150943396227, "acc_stderr": 0.0256042334708991, "acc_norm": 0.22264150943396227, "acc_norm_stderr": 0.0256042334708991 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.22916666666666666, "acc_stderr": 0.035146974678623884, "acc_norm": 0.22916666666666666, "acc_norm_stderr": 0.035146974678623884 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.15, "acc_stderr": 0.035887028128263714, "acc_norm": 0.15, "acc_norm_stderr": 0.035887028128263714 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 
0.04229525846816506 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.19653179190751446, "acc_stderr": 0.03029957466478814, "acc_norm": 0.19653179190751446, "acc_norm_stderr": 0.03029957466478814 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237654, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237654 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.22127659574468084, "acc_stderr": 0.027136349602424063, "acc_norm": 0.22127659574468084, "acc_norm_stderr": 0.027136349602424063 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.04049339297748141, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.04049339297748141 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.20689655172413793, "acc_stderr": 0.03375672449560553, "acc_norm": 0.20689655172413793, "acc_norm_stderr": 0.03375672449560553 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24338624338624337, "acc_stderr": 0.022101128787415433, "acc_norm": 0.24338624338624337, "acc_norm_stderr": 0.022101128787415433 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.1746031746031746, "acc_stderr": 0.03395490020856113, "acc_norm": 0.1746031746031746, "acc_norm_stderr": 0.03395490020856113 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.31290322580645163, "acc_stderr": 0.02637756702864586, "acc_norm": 0.31290322580645163, "acc_norm_stderr": 0.02637756702864586 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.30049261083743845, "acc_stderr": 0.03225799476233483, "acc_norm": 0.30049261083743845, "acc_norm_stderr": 0.03225799476233483 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2, "acc_stderr": 0.031234752377721175, "acc_norm": 0.2, "acc_norm_stderr": 0.031234752377721175 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.26262626262626265, "acc_stderr": 0.031353050095330855, "acc_norm": 0.26262626262626265, "acc_norm_stderr": 0.031353050095330855 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.34196891191709844, "acc_stderr": 0.03423465100104281, "acc_norm": 0.34196891191709844, "acc_norm_stderr": 0.03423465100104281 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.2641025641025641, "acc_stderr": 0.022352193737453285, "acc_norm": 0.2641025641025641, "acc_norm_stderr": 0.022352193737453285 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.027309140588230182, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.027309140588230182 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.24369747899159663, "acc_stderr": 0.027886828078380572, "acc_norm": 0.24369747899159663, "acc_norm_stderr": 0.027886828078380572 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.26490066225165565, 
"acc_stderr": 0.036030385453603826, "acc_norm": 0.26490066225165565, "acc_norm_stderr": 0.036030385453603826 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.22935779816513763, "acc_stderr": 0.018025349724618684, "acc_norm": 0.22935779816513763, "acc_norm_stderr": 0.018025349724618684 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4305555555555556, "acc_stderr": 0.03376922151252335, "acc_norm": 0.4305555555555556, "acc_norm_stderr": 0.03376922151252335 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.22058823529411764, "acc_stderr": 0.02910225438967409, "acc_norm": 0.22058823529411764, "acc_norm_stderr": 0.02910225438967409 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.26582278481012656, "acc_stderr": 0.028756799629658342, "acc_norm": 0.26582278481012656, "acc_norm_stderr": 0.028756799629658342 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.23766816143497757, "acc_stderr": 0.028568079464714267, "acc_norm": 0.23766816143497757, "acc_norm_stderr": 0.028568079464714267 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.22900763358778625, "acc_stderr": 0.036853466317118506, "acc_norm": 0.22900763358778625, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2809917355371901, "acc_stderr": 0.04103203830514512, "acc_norm": 0.2809917355371901, "acc_norm_stderr": 0.04103203830514512 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.24539877300613497, "acc_stderr": 0.03380939813943354, "acc_norm": 0.24539877300613497, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.23214285714285715, "acc_stderr": 0.04007341809755807, "acc_norm": 0.23214285714285715, "acc_norm_stderr": 0.04007341809755807 }, "harness|hendrycksTest-management|5": { "acc": 0.1941747572815534, "acc_stderr": 0.03916667762822585, "acc_norm": 0.1941747572815534, "acc_norm_stderr": 0.03916667762822585 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2264957264957265, "acc_stderr": 0.027421007295392912, "acc_norm": 0.2264957264957265, "acc_norm_stderr": 0.027421007295392912 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2567049808429119, "acc_stderr": 0.015620480263064526, "acc_norm": 0.2567049808429119, "acc_norm_stderr": 0.015620480263064526 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24855491329479767, "acc_stderr": 0.023267528432100174, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24692737430167597, "acc_stderr": 0.014422292204808835, "acc_norm": 0.24692737430167597, "acc_norm_stderr": 0.014422292204808835 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.2222222222222222, "acc_stderr": 0.02380518652488814, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.02380518652488814 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.29260450160771706, "acc_stderr": 0.025839898334877983, "acc_norm": 0.29260450160771706, "acc_norm_stderr": 0.025839898334877983 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.24074074074074073, "acc_stderr": 0.02378858355165854, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.02378858355165854 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.2624113475177305, "acc_stderr": 0.026244920349843007, "acc_norm": 0.2624113475177305, "acc_norm_stderr": 0.026244920349843007 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24511082138200782, "acc_stderr": 0.010986307870045509, "acc_norm": 0.24511082138200782, "acc_norm_stderr": 0.010986307870045509 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.36764705882352944, "acc_stderr": 0.029289413409403192, "acc_norm": 0.36764705882352944, "acc_norm_stderr": 0.029289413409403192 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.22549019607843138, "acc_stderr": 0.016906615927288152, "acc_norm": 0.22549019607843138, "acc_norm_stderr": 0.016906615927288152 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.18181818181818182, "acc_stderr": 0.036942843353378, "acc_norm": 0.18181818181818182, "acc_norm_stderr": 0.036942843353378 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.2571428571428571, "acc_stderr": 0.02797982353874455, "acc_norm": 0.2571428571428571, "acc_norm_stderr": 0.02797982353874455 }, "harness|hendrycksTest-sociology|5": { "acc": 0.23880597014925373, "acc_stderr": 0.030147775935409217, "acc_norm": 0.23880597014925373, "acc_norm_stderr": 0.030147775935409217 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.24, "acc_stderr": 0.042923469599092816, "acc_norm": 0.24, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-virology|5": { "acc": 0.21686746987951808, "acc_stderr": 0.03208284450356365, "acc_norm": 0.21686746987951808, "acc_norm_stderr": 0.03208284450356365 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.21052631578947367, "acc_stderr": 0.03126781714663179, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.03126781714663179 }, "harness|truthfulqa:mc|0": { "mc1": 0.26193390452876375, "mc1_stderr": 0.015392118805015021, "mc2": 0.4608972262894305, "mc2_stderr": 0.015343271963572871 }, "harness|winogrande|5": { "acc": 0.5130228887134964, "acc_stderr": 0.014047718393997663 }, "harness|gsm8k|5": { "acc": 0.006065200909780136, "acc_stderr": 0.0021386703014604725 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
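The aggregate scores in the results block above come from per-task detail files. As a short sketch of how the per-sample predictions behind one of those scores can be inspected: the `harness_hellaswag_10` configuration and its `latest` split are taken from this repo's config listing, but the parquet column layout is not documented in the card, so the sketch only prints the schema rather than assuming field names.

```python
from datasets import load_dataset

# Per-sample details behind the hellaswag score reported above. The config
# name and the "latest" split are listed in this dataset's metadata; the
# column layout of the detail parquet files is not documented in the card,
# so we only inspect it generically instead of assuming field names.
details = load_dataset(
    "open-llm-leaderboard/details_chargoddard__SmolLlamix-8x101M",
    "harness_hellaswag_10",
    split="latest",
)

print(details)               # row count and available columns
print(details.column_names)  # inspect the schema before relying on fields
```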
open-llm-leaderboard/details_chargoddard__SmolLlamix-8x101M
[ "region:us" ]
2024-01-04T12:32:19+00:00
{"pretty_name": "Evaluation run of chargoddard/SmolLlamix-8x101M", "dataset_summary": "Dataset automatically created during the evaluation run of model [chargoddard/SmolLlamix-8x101M](https://huggingface.co/chargoddard/SmolLlamix-8x101M) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_chargoddard__SmolLlamix-8x101M\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:29:56.794531](https://huggingface.co/datasets/open-llm-leaderboard/details_chargoddard__SmolLlamix-8x101M/blob/main/results_2024-01-04T12-29-56.794531.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.24665141008843472,\n \"acc_stderr\": 0.030422170490043785,\n \"acc_norm\": 0.24716769398389823,\n \"acc_norm_stderr\": 0.031197299482121136,\n \"mc1\": 0.26193390452876375,\n \"mc1_stderr\": 0.015392118805015021,\n \"mc2\": 0.4608972262894305,\n \"mc2_stderr\": 0.015343271963572871\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.17918088737201365,\n \"acc_stderr\": 0.011207045216615667,\n \"acc_norm\": 0.22696245733788395,\n \"acc_norm_stderr\": 0.012240491536132866\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2765385381398128,\n \"acc_stderr\": 0.0044637210713190986,\n \"acc_norm\": 0.28500298745269864,\n \"acc_norm_stderr\": 0.004504932999736393\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.03820169914517904,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.03820169914517904\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.18421052631578946,\n \"acc_stderr\": 0.0315469804508223,\n \"acc_norm\": 0.18421052631578946,\n \"acc_norm_stderr\": 0.0315469804508223\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932268,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932268\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.22264150943396227,\n \"acc_stderr\": 0.0256042334708991,\n \"acc_norm\": 0.22264150943396227,\n \"acc_norm_stderr\": 0.0256042334708991\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.22916666666666666,\n \"acc_stderr\": 0.035146974678623884,\n \"acc_norm\": 0.22916666666666666,\n \"acc_norm_stderr\": 0.035146974678623884\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.15,\n \"acc_stderr\": 0.035887028128263714,\n \"acc_norm\": 0.15,\n \"acc_norm_stderr\": 0.035887028128263714\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.19653179190751446,\n \"acc_stderr\": 0.03029957466478814,\n \"acc_norm\": 0.19653179190751446,\n \"acc_norm_stderr\": 0.03029957466478814\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237654,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237654\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.22127659574468084,\n \"acc_stderr\": 0.027136349602424063,\n \"acc_norm\": 0.22127659574468084,\n \"acc_norm_stderr\": 0.027136349602424063\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.24561403508771928,\n \"acc_stderr\": 0.04049339297748141,\n \"acc_norm\": 0.24561403508771928,\n \"acc_norm_stderr\": 0.04049339297748141\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.20689655172413793,\n \"acc_stderr\": 0.03375672449560553,\n \"acc_norm\": 0.20689655172413793,\n \"acc_norm_stderr\": 0.03375672449560553\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.24338624338624337,\n \"acc_stderr\": 0.022101128787415433,\n \"acc_norm\": 0.24338624338624337,\n \"acc_norm_stderr\": 0.022101128787415433\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.1746031746031746,\n \"acc_stderr\": 0.03395490020856113,\n \"acc_norm\": 0.1746031746031746,\n \"acc_norm_stderr\": 0.03395490020856113\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.31290322580645163,\n \"acc_stderr\": 0.02637756702864586,\n \"acc_norm\": 0.31290322580645163,\n \"acc_norm_stderr\": 0.02637756702864586\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.30049261083743845,\n \"acc_stderr\": 0.03225799476233483,\n \"acc_norm\": 0.30049261083743845,\n \"acc_norm_stderr\": 0.03225799476233483\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.031234752377721175,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.031234752377721175\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.26262626262626265,\n \"acc_stderr\": 0.031353050095330855,\n \"acc_norm\": 0.26262626262626265,\n \"acc_norm_stderr\": 0.031353050095330855\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.34196891191709844,\n \"acc_stderr\": 0.03423465100104281,\n \"acc_norm\": 0.34196891191709844,\n \"acc_norm_stderr\": 0.03423465100104281\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.2641025641025641,\n \"acc_stderr\": 0.022352193737453285,\n \"acc_norm\": 0.2641025641025641,\n \"acc_norm_stderr\": 0.022352193737453285\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.027309140588230182,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.027309140588230182\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.24369747899159663,\n \"acc_stderr\": 0.027886828078380572,\n \"acc_norm\": 0.24369747899159663,\n \"acc_norm_stderr\": 0.027886828078380572\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.26490066225165565,\n \"acc_stderr\": 0.036030385453603826,\n \"acc_norm\": 0.26490066225165565,\n \"acc_norm_stderr\": 0.036030385453603826\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.22935779816513763,\n \"acc_stderr\": 0.018025349724618684,\n \"acc_norm\": 0.22935779816513763,\n \"acc_norm_stderr\": 0.018025349724618684\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4305555555555556,\n \"acc_stderr\": 0.03376922151252335,\n \"acc_norm\": 0.4305555555555556,\n \"acc_norm_stderr\": 0.03376922151252335\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.22058823529411764,\n \"acc_stderr\": 0.02910225438967409,\n \"acc_norm\": 0.22058823529411764,\n \"acc_norm_stderr\": 0.02910225438967409\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.26582278481012656,\n \"acc_stderr\": 0.028756799629658342,\n \"acc_norm\": 0.26582278481012656,\n \"acc_norm_stderr\": 0.028756799629658342\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.23766816143497757,\n \"acc_stderr\": 0.028568079464714267,\n \"acc_norm\": 0.23766816143497757,\n \"acc_norm_stderr\": 0.028568079464714267\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.22900763358778625,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.22900763358778625,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2809917355371901,\n \"acc_stderr\": 0.04103203830514512,\n \"acc_norm\": 0.2809917355371901,\n \"acc_norm_stderr\": 0.04103203830514512\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.24539877300613497,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.24539877300613497,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.23214285714285715,\n \"acc_stderr\": 0.04007341809755807,\n \"acc_norm\": 0.23214285714285715,\n \"acc_norm_stderr\": 0.04007341809755807\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.1941747572815534,\n \"acc_stderr\": 0.03916667762822585,\n \"acc_norm\": 0.1941747572815534,\n \"acc_norm_stderr\": 0.03916667762822585\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2264957264957265,\n \"acc_stderr\": 0.027421007295392912,\n \"acc_norm\": 0.2264957264957265,\n \"acc_norm_stderr\": 0.027421007295392912\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.2567049808429119,\n \"acc_stderr\": 0.015620480263064526,\n \"acc_norm\": 0.2567049808429119,\n \"acc_norm_stderr\": 0.015620480263064526\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24692737430167597,\n \"acc_stderr\": 0.014422292204808835,\n \"acc_norm\": 0.24692737430167597,\n \"acc_norm_stderr\": 0.014422292204808835\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.02380518652488814,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.02380518652488814\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.29260450160771706,\n \"acc_stderr\": 0.025839898334877983,\n \"acc_norm\": 0.29260450160771706,\n \"acc_norm_stderr\": 0.025839898334877983\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.02378858355165854,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.02378858355165854\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2624113475177305,\n \"acc_stderr\": 0.026244920349843007,\n \"acc_norm\": 0.2624113475177305,\n \"acc_norm_stderr\": 0.026244920349843007\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24511082138200782,\n \"acc_stderr\": 0.010986307870045509,\n \"acc_norm\": 0.24511082138200782,\n \"acc_norm_stderr\": 0.010986307870045509\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.36764705882352944,\n \"acc_stderr\": 0.029289413409403192,\n \"acc_norm\": 0.36764705882352944,\n \"acc_norm_stderr\": 0.029289413409403192\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.22549019607843138,\n \"acc_stderr\": 0.016906615927288152,\n \"acc_norm\": 0.22549019607843138,\n \"acc_norm_stderr\": 0.016906615927288152\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.18181818181818182,\n \"acc_stderr\": 0.036942843353378,\n \"acc_norm\": 0.18181818181818182,\n \"acc_norm_stderr\": 0.036942843353378\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.2571428571428571,\n \"acc_stderr\": 0.02797982353874455,\n \"acc_norm\": 0.2571428571428571,\n \"acc_norm_stderr\": 0.02797982353874455\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23880597014925373,\n \"acc_stderr\": 0.030147775935409217,\n \"acc_norm\": 0.23880597014925373,\n \"acc_norm_stderr\": 0.030147775935409217\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.21686746987951808,\n \"acc_stderr\": 0.03208284450356365,\n \"acc_norm\": 0.21686746987951808,\n \"acc_norm_stderr\": 0.03208284450356365\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.21052631578947367,\n \"acc_stderr\": 0.03126781714663179,\n \"acc_norm\": 0.21052631578947367,\n \"acc_norm_stderr\": 0.03126781714663179\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.26193390452876375,\n \"mc1_stderr\": 0.015392118805015021,\n \"mc2\": 0.4608972262894305,\n \"mc2_stderr\": 0.015343271963572871\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5130228887134964,\n \"acc_stderr\": 0.014047718393997663\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.006065200909780136,\n 
\"acc_stderr\": 0.0021386703014604725\n }\n}\n```", "repo_url": "https://huggingface.co/chargoddard/SmolLlamix-8x101M", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-29-56.794531.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-29-56.794531.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-29-56.794531.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-29-56.794531.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-29-56.794531.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_29_56.794531", "path": ["**/details_harness|winogrande|5_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-29-56.794531.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T12_29_56.794531", "path": ["results_2024-01-04T12-29-56.794531.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T12-29-56.794531.parquet"]}]}]}
2024-01-04T12:32:42+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of chargoddard/SmolLlamix-8x101M Dataset automatically created during the evaluation run of model chargoddard/SmolLlamix-8x101M on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:29:56.794531 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
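A minimal loading sketch for this run's details, assuming the details repository follows the standard open-llm-leaderboard naming (`open-llm-leaderboard/details_chargoddard__SmolLlamix-8x101M`) and using the `harness_winogrande_5` config as an example:

```python
from datasets import load_dataset

# Load the per-sample details for one evaluated task of this run.
# The repository name below is assumed from the standard
# open-llm-leaderboard naming scheme for details datasets.
data = load_dataset(
    "open-llm-leaderboard/details_chargoddard__SmolLlamix-8x101M",
    "harness_winogrande_5",
    split="train",
)
```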
[ "# Dataset Card for Evaluation run of chargoddard/SmolLlamix-8x101M\n\n\n\nDataset automatically created during the evaluation run of model chargoddard/SmolLlamix-8x101M on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:29:56.794531(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of chargoddard/SmolLlamix-8x101M\n\n\n\nDataset automatically created during the evaluation run of model chargoddard/SmolLlamix-8x101M on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:29:56.794531(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of chargoddard/SmolLlamix-8x101M\n\n\n\nDataset automatically created during the evaluation run of model chargoddard/SmolLlamix-8x101M on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:29:56.794531(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
2d2326d5173ecbd758d17b179092aa5a44d31f51
# Dataset Card for Evaluation run of uukuguy/Mistral-7B-OpenOrca-lora-merged <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [uukuguy/Mistral-7B-OpenOrca-lora-merged](https://huggingface.co/uukuguy/Mistral-7B-OpenOrca-lora-merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_uukuguy__Mistral-7B-OpenOrca-lora-merged", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:30:42.167357](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__Mistral-7B-OpenOrca-lora-merged/blob/main/results_2024-01-04T12-30-42.167357.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6401848053710576, "acc_stderr": 0.03223183201048062, "acc_norm": 0.6462692467757035, "acc_norm_stderr": 0.03287896875364672, "mc1": 0.2839657282741738, "mc1_stderr": 0.015785370858396725, "mc2": 0.4270116265533286, "mc2_stderr": 0.01423822627667514 }, "harness|arc:challenge|25": { "acc": 0.5725255972696246, "acc_stderr": 0.014456862944650649, "acc_norm": 0.6177474402730375, "acc_norm_stderr": 0.014200454049979275 }, "harness|hellaswag|10": { "acc": 0.6360286795459071, "acc_stderr": 0.0048015720289207925, "acc_norm": 0.8360884285998805, "acc_norm_stderr": 0.0036943873611776485 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720385, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720385 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6578947368421053, "acc_stderr": 0.03860731599316091, "acc_norm": 0.6578947368421053, "acc_norm_stderr": 0.03860731599316091 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.690566037735849, "acc_stderr": 0.028450154794118637, "acc_norm": 0.690566037735849, "acc_norm_stderr": 0.028450154794118637 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7361111111111112, "acc_stderr": 0.03685651095897532, "acc_norm": 0.7361111111111112, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, 
"acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6416184971098265, "acc_stderr": 0.036563436533531585, "acc_norm": 0.6416184971098265, "acc_norm_stderr": 0.036563436533531585 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.048580835742663454, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.048580835742663454 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5914893617021276, "acc_stderr": 0.032134180267015755, "acc_norm": 0.5914893617021276, "acc_norm_stderr": 0.032134180267015755 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370332, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370332 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3915343915343915, "acc_stderr": 0.025138091388851112, "acc_norm": 0.3915343915343915, "acc_norm_stderr": 0.025138091388851112 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7677419354838709, "acc_stderr": 0.024022256130308235, "acc_norm": 0.7677419354838709, "acc_norm_stderr": 0.024022256130308235 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5320197044334976, "acc_stderr": 0.035107665979592154, "acc_norm": 0.5320197044334976, "acc_norm_stderr": 0.035107665979592154 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7676767676767676, "acc_stderr": 0.030088629490217487, "acc_norm": 0.7676767676767676, "acc_norm_stderr": 0.030088629490217487 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8808290155440415, "acc_stderr": 0.02338193534812143, "acc_norm": 0.8808290155440415, "acc_norm_stderr": 0.02338193534812143 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563973, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563973 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.02911661760608301, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.02911661760608301 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6680672268907563, "acc_stderr": 0.03058869701378364, "acc_norm": 0.6680672268907563, "acc_norm_stderr": 0.03058869701378364 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.31788079470198677, "acc_stderr": 0.038020397601079024, "acc_norm": 0.31788079470198677, "acc_norm_stderr": 0.038020397601079024 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.818348623853211, "acc_stderr": 0.016530617409266875, "acc_norm": 0.818348623853211, "acc_norm_stderr": 0.016530617409266875 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5648148148148148, "acc_stderr": 0.03381200005643527, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.03381200005643527 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.803921568627451, "acc_stderr": 0.027865942286639318, "acc_norm": 0.803921568627451, "acc_norm_stderr": 0.027865942286639318 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7637130801687764, "acc_stderr": 0.027652153144159263, "acc_norm": 0.7637130801687764, "acc_norm_stderr": 0.027652153144159263 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7085201793721974, "acc_stderr": 0.03050028317654585, "acc_norm": 0.7085201793721974, "acc_norm_stderr": 0.03050028317654585 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7862595419847328, "acc_stderr": 0.0359546161177469, "acc_norm": 0.7862595419847328, "acc_norm_stderr": 0.0359546161177469 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.040191074725573483, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7914110429447853, "acc_stderr": 0.031921934489347235, "acc_norm": 0.7914110429447853, "acc_norm_stderr": 0.031921934489347235 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.04738975119274155, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.04738975119274155 }, "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.03989139859531771, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.03989139859531771 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8846153846153846, "acc_stderr": 0.020930193185179333, "acc_norm": 0.8846153846153846, "acc_norm_stderr": 0.020930193185179333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8122605363984674, "acc_stderr": 0.013964393769899133, "acc_norm": 0.8122605363984674, "acc_norm_stderr": 0.013964393769899133 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7167630057803468, "acc_stderr": 0.024257901705323378, "acc_norm": 0.7167630057803468, "acc_norm_stderr": 0.024257901705323378 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.33743016759776534, "acc_stderr": 0.015813901283913044, "acc_norm": 0.33743016759776534, "acc_norm_stderr": 0.015813901283913044 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7483660130718954, "acc_stderr": 0.0248480182638752, "acc_norm": 0.7483660130718954, "acc_norm_stderr": 0.0248480182638752 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7138263665594855, "acc_stderr": 0.02567025924218894, "acc_norm": 0.7138263665594855, "acc_norm_stderr": 0.02567025924218894 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7345679012345679, "acc_stderr": 0.024569223600460845, "acc_norm": 0.7345679012345679, "acc_norm_stderr": 
0.024569223600460845 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4858156028368794, "acc_stderr": 0.02981549448368206, "acc_norm": 0.4858156028368794, "acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.44589308996088656, "acc_stderr": 0.012695244711379774, "acc_norm": 0.44589308996088656, "acc_norm_stderr": 0.012695244711379774 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6875, "acc_stderr": 0.02815637344037142, "acc_norm": 0.6875, "acc_norm_stderr": 0.02815637344037142 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6813725490196079, "acc_stderr": 0.018850084696468712, "acc_norm": 0.6813725490196079, "acc_norm_stderr": 0.018850084696468712 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7346938775510204, "acc_stderr": 0.028263889943784593, "acc_norm": 0.7346938775510204, "acc_norm_stderr": 0.028263889943784593 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8308457711442786, "acc_stderr": 0.026508590656233264, "acc_norm": 0.8308457711442786, "acc_norm_stderr": 0.026508590656233264 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.2839657282741738, "mc1_stderr": 0.015785370858396725, "mc2": 0.4270116265533286, "mc2_stderr": 0.01423822627667514 }, "harness|winogrande|5": { "acc": 0.7853196527229677, "acc_stderr": 0.011539912734345398 }, "harness|gsm8k|5": { "acc": 0.3813495072024261, "acc_stderr": 0.013379089877400729 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
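For the aggregated metrics mentioned in the card above, the "results" configuration can be loaded the same way; a minimal sketch, assuming the split name "latest" as listed in this dataset's config metadata:

```python
from datasets import load_dataset

# Aggregated run-level results (the "results" config described above).
# The "latest" split name is assumed from the config listing and is
# expected to point at the most recent evaluation of this model.
results = load_dataset(
    "open-llm-leaderboard/details_uukuguy__Mistral-7B-OpenOrca-lora-merged",
    "results",
    split="latest",
)
```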
open-llm-leaderboard/details_uukuguy__Mistral-7B-OpenOrca-lora-merged
[ "region:us" ]
2024-01-04T12:33:01+00:00
{"pretty_name": "Evaluation run of uukuguy/Mistral-7B-OpenOrca-lora-merged", "dataset_summary": "Dataset automatically created during the evaluation run of model [uukuguy/Mistral-7B-OpenOrca-lora-merged](https://huggingface.co/uukuguy/Mistral-7B-OpenOrca-lora-merged) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_uukuguy__Mistral-7B-OpenOrca-lora-merged\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:30:42.167357](https://huggingface.co/datasets/open-llm-leaderboard/details_uukuguy__Mistral-7B-OpenOrca-lora-merged/blob/main/results_2024-01-04T12-30-42.167357.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6401848053710576,\n \"acc_stderr\": 0.03223183201048062,\n \"acc_norm\": 0.6462692467757035,\n \"acc_norm_stderr\": 0.03287896875364672,\n \"mc1\": 0.2839657282741738,\n \"mc1_stderr\": 0.015785370858396725,\n \"mc2\": 0.4270116265533286,\n \"mc2_stderr\": 0.01423822627667514\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5725255972696246,\n \"acc_stderr\": 0.014456862944650649,\n \"acc_norm\": 0.6177474402730375,\n \"acc_norm_stderr\": 0.014200454049979275\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6360286795459071,\n \"acc_stderr\": 0.0048015720289207925,\n \"acc_norm\": 0.8360884285998805,\n \"acc_norm_stderr\": 0.0036943873611776485\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720385,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720385\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6578947368421053,\n \"acc_stderr\": 0.03860731599316091,\n \"acc_norm\": 0.6578947368421053,\n \"acc_norm_stderr\": 0.03860731599316091\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.690566037735849,\n \"acc_stderr\": 0.028450154794118637,\n \"acc_norm\": 0.690566037735849,\n \"acc_norm_stderr\": 0.028450154794118637\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7361111111111112,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6416184971098265,\n \"acc_stderr\": 0.036563436533531585,\n \"acc_norm\": 0.6416184971098265,\n \"acc_norm_stderr\": 0.036563436533531585\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.048580835742663454,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.048580835742663454\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5914893617021276,\n \"acc_stderr\": 0.032134180267015755,\n \"acc_norm\": 0.5914893617021276,\n \"acc_norm_stderr\": 0.032134180267015755\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370332,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370332\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3915343915343915,\n \"acc_stderr\": 0.025138091388851112,\n \"acc_norm\": 0.3915343915343915,\n \"acc_norm_stderr\": 0.025138091388851112\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7677419354838709,\n \"acc_stderr\": 0.024022256130308235,\n \"acc_norm\": 0.7677419354838709,\n \"acc_norm_stderr\": 0.024022256130308235\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5320197044334976,\n \"acc_stderr\": 0.035107665979592154,\n \"acc_norm\": 0.5320197044334976,\n \"acc_norm_stderr\": 0.035107665979592154\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7676767676767676,\n \"acc_stderr\": 0.030088629490217487,\n \"acc_norm\": 0.7676767676767676,\n \"acc_norm_stderr\": 0.030088629490217487\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8808290155440415,\n \"acc_stderr\": 0.02338193534812143,\n \"acc_norm\": 0.8808290155440415,\n \"acc_norm_stderr\": 
0.02338193534812143\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.023946724741563973,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563973\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35185185185185186,\n \"acc_stderr\": 0.02911661760608301,\n \"acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.02911661760608301\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6680672268907563,\n \"acc_stderr\": 0.03058869701378364,\n \"acc_norm\": 0.6680672268907563,\n \"acc_norm_stderr\": 0.03058869701378364\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.31788079470198677,\n \"acc_stderr\": 0.038020397601079024,\n \"acc_norm\": 0.31788079470198677,\n \"acc_norm_stderr\": 0.038020397601079024\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.818348623853211,\n \"acc_stderr\": 0.016530617409266875,\n \"acc_norm\": 0.818348623853211,\n \"acc_norm_stderr\": 0.016530617409266875\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.03381200005643527,\n \"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.03381200005643527\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.803921568627451,\n \"acc_stderr\": 0.027865942286639318,\n \"acc_norm\": 0.803921568627451,\n \"acc_norm_stderr\": 0.027865942286639318\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7637130801687764,\n \"acc_stderr\": 0.027652153144159263,\n \"acc_norm\": 0.7637130801687764,\n \"acc_norm_stderr\": 0.027652153144159263\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7085201793721974,\n \"acc_stderr\": 0.03050028317654585,\n \"acc_norm\": 0.7085201793721974,\n \"acc_norm_stderr\": 0.03050028317654585\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7862595419847328,\n \"acc_stderr\": 0.0359546161177469,\n \"acc_norm\": 0.7862595419847328,\n \"acc_norm_stderr\": 0.0359546161177469\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7914110429447853,\n \"acc_stderr\": 0.031921934489347235,\n \"acc_norm\": 0.7914110429447853,\n \"acc_norm_stderr\": 0.031921934489347235\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.04738975119274155,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.04738975119274155\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.03989139859531771,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.03989139859531771\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8846153846153846,\n \"acc_stderr\": 0.020930193185179333,\n \"acc_norm\": 0.8846153846153846,\n \"acc_norm_stderr\": 0.020930193185179333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8122605363984674,\n \"acc_stderr\": 0.013964393769899133,\n \"acc_norm\": 0.8122605363984674,\n \"acc_norm_stderr\": 0.013964393769899133\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7167630057803468,\n \"acc_stderr\": 0.024257901705323378,\n \"acc_norm\": 0.7167630057803468,\n \"acc_norm_stderr\": 0.024257901705323378\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.33743016759776534,\n \"acc_stderr\": 0.015813901283913044,\n \"acc_norm\": 0.33743016759776534,\n \"acc_norm_stderr\": 0.015813901283913044\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7483660130718954,\n \"acc_stderr\": 0.0248480182638752,\n \"acc_norm\": 0.7483660130718954,\n \"acc_norm_stderr\": 0.0248480182638752\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7138263665594855,\n \"acc_stderr\": 0.02567025924218894,\n \"acc_norm\": 0.7138263665594855,\n \"acc_norm_stderr\": 0.02567025924218894\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7345679012345679,\n \"acc_stderr\": 0.024569223600460845,\n \"acc_norm\": 0.7345679012345679,\n \"acc_norm_stderr\": 0.024569223600460845\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.44589308996088656,\n \"acc_stderr\": 0.012695244711379774,\n \"acc_norm\": 0.44589308996088656,\n \"acc_norm_stderr\": 0.012695244711379774\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6875,\n \"acc_stderr\": 0.02815637344037142,\n \"acc_norm\": 0.6875,\n \"acc_norm_stderr\": 0.02815637344037142\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6813725490196079,\n \"acc_stderr\": 0.018850084696468712,\n \"acc_norm\": 0.6813725490196079,\n \"acc_norm_stderr\": 0.018850084696468712\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.028263889943784593,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.028263889943784593\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n \"acc_stderr\": 0.026508590656233264,\n \"acc_norm\": 0.8308457711442786,\n \"acc_norm_stderr\": 0.026508590656233264\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2839657282741738,\n \"mc1_stderr\": 0.015785370858396725,\n \"mc2\": 0.4270116265533286,\n \"mc2_stderr\": 0.01423822627667514\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7853196527229677,\n \"acc_stderr\": 0.011539912734345398\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3813495072024261,\n 
\"acc_stderr\": 0.013379089877400729\n }\n}\n```", "repo_url": "https://huggingface.co/uukuguy/Mistral-7B-OpenOrca-lora-merged", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-30-42.167357.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-30-42.167357.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-30-42.167357.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-30-42.167357.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-30-42.167357.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_30_42.167357", "path": ["**/details_harness|winogrande|5_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-30-42.167357.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T12_30_42.167357", "path": ["results_2024-01-04T12-30-42.167357.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T12-30-42.167357.parquet"]}]}]}
2024-01-04T12:33:23+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of uukuguy/Mistral-7B-OpenOrca-lora-merged Dataset automatically created during the evaluation run of model uukuguy/Mistral-7B-OpenOrca-lora-merged on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:30:42.167357 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of uukuguy/Mistral-7B-OpenOrca-lora-merged\n\n\n\nDataset automatically created during the evaluation run of model uukuguy/Mistral-7B-OpenOrca-lora-merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:30:42.167357(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of uukuguy/Mistral-7B-OpenOrca-lora-merged\n\n\n\nDataset automatically created during the evaluation run of model uukuguy/Mistral-7B-OpenOrca-lora-merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:30:42.167357(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 199, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of uukuguy/Mistral-7B-OpenOrca-lora-merged\n\n\n\nDataset automatically created during the evaluation run of model uukuguy/Mistral-7B-OpenOrca-lora-merged on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:30:42.167357(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
cc9eb70ec31b91a1045737057b6f6465251479fa
# Dataset Card for Evaluation run of DopeorNope/SOLARC-M-10.7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [DopeorNope/SOLARC-M-10.7B](https://huggingface.co/DopeorNope/SOLARC-M-10.7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_DopeorNope__SOLARC-M-10.7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:32:38.431063](https://huggingface.co/datasets/open-llm-leaderboard/details_DopeorNope__SOLARC-M-10.7B/blob/main/results_2024-01-04T12-32-38.431063.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6669558592994752, "acc_stderr": 0.03159525026454693, "acc_norm": 0.667691393491765, "acc_norm_stderr": 0.03223875437522202, "mc1": 0.5716034271725826, "mc1_stderr": 0.017323088597314747, "mc2": 0.7185061667944077, "mc2_stderr": 0.015014851042298718 }, "harness|arc:challenge|25": { "acc": 0.6851535836177475, "acc_stderr": 0.01357265770308495, "acc_norm": 0.71160409556314, "acc_norm_stderr": 0.013238394422428173 }, "harness|hellaswag|10": { "acc": 0.7133041226847242, "acc_stderr": 0.004512940497462742, "acc_norm": 0.8840868352917746, "acc_norm_stderr": 0.003194665266078602 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.756578947368421, "acc_stderr": 0.034923496688842384, "acc_norm": 0.756578947368421, "acc_norm_stderr": 0.034923496688842384 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.74, "acc_stderr": 0.0440844002276808, "acc_norm": 0.74, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.02863723563980089, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.02863723563980089 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956913, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956913 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.625531914893617, "acc_stderr": 0.03163910665367291, "acc_norm": 0.625531914893617, "acc_norm_stderr": 0.03163910665367291 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6275862068965518, "acc_stderr": 0.04028731532947558, "acc_norm": 0.6275862068965518, "acc_norm_stderr": 0.04028731532947558 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4947089947089947, "acc_stderr": 0.02574986828855657, "acc_norm": 0.4947089947089947, "acc_norm_stderr": 0.02574986828855657 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8193548387096774, "acc_stderr": 0.021886178567172534, "acc_norm": 0.8193548387096774, "acc_norm_stderr": 0.021886178567172534 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.03517945038691063, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8121212121212121, "acc_stderr": 0.03050193405942914, "acc_norm": 0.8121212121212121, "acc_norm_stderr": 0.03050193405942914 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8686868686868687, "acc_stderr": 0.024063156416822516, "acc_norm": 0.8686868686868687, "acc_norm_stderr": 0.024063156416822516 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603348, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603348 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563976, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563976 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.36666666666666664, "acc_stderr": 0.029381620726465073, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.029381620726465073 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7142857142857143, "acc_stderr": 0.029344572500634332, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.029344572500634332 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.37748344370860926, "acc_stderr": 
0.03958027231121569, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 0.03958027231121569 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8458715596330275, "acc_stderr": 0.015480826865374308, "acc_norm": 0.8458715596330275, "acc_norm_stderr": 0.015480826865374308 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5787037037037037, "acc_stderr": 0.033674621388960775, "acc_norm": 0.5787037037037037, "acc_norm_stderr": 0.033674621388960775 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8578431372549019, "acc_stderr": 0.02450980392156862, "acc_norm": 0.8578431372549019, "acc_norm_stderr": 0.02450980392156862 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8481012658227848, "acc_stderr": 0.023363878096632446, "acc_norm": 0.8481012658227848, "acc_norm_stderr": 0.023363878096632446 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6816143497757847, "acc_stderr": 0.03126580522513713, "acc_norm": 0.6816143497757847, "acc_norm_stderr": 0.03126580522513713 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.03727673575596915, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.03727673575596915 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.033519538795212696, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.033519538795212696 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.03492606476623791, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.03492606476623791 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.0230866350868414, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.0230866350868414 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8033205619412516, "acc_stderr": 0.014214138556913917, "acc_norm": 0.8033205619412516, "acc_norm_stderr": 0.014214138556913917 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7572254335260116, "acc_stderr": 0.023083658586984204, "acc_norm": 0.7572254335260116, "acc_norm_stderr": 0.023083658586984204 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.39329608938547483, "acc_stderr": 0.016337268694270105, "acc_norm": 0.39329608938547483, "acc_norm_stderr": 0.016337268694270105 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7581699346405228, "acc_stderr": 0.024518195641879334, "acc_norm": 0.7581699346405228, "acc_norm_stderr": 0.024518195641879334 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.729903536977492, "acc_stderr": 0.02521804037341062, "acc_norm": 0.729903536977492, "acc_norm_stderr": 0.02521804037341062 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0227797190887334, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0227797190887334 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4929078014184397, "acc_stderr": 0.02982449855912901, "acc_norm": 0.4929078014184397, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4934810951760104, "acc_stderr": 0.012769150688867503, "acc_norm": 0.4934810951760104, "acc_norm_stderr": 0.012769150688867503 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7389705882352942, "acc_stderr": 0.026679252270103128, "acc_norm": 0.7389705882352942, "acc_norm_stderr": 0.026679252270103128 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6797385620915033, "acc_stderr": 0.018875682938069446, "acc_norm": 0.6797385620915033, "acc_norm_stderr": 0.018875682938069446 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.028123429335142783, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.028123429335142783 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.030151134457776334, "acc_norm": 0.9, "acc_norm_stderr": 0.030151134457776334 }, "harness|hendrycksTest-virology|5": { "acc": 0.5843373493975904, "acc_stderr": 0.03836722176598053, "acc_norm": 0.5843373493975904, "acc_norm_stderr": 0.03836722176598053 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.783625730994152, "acc_stderr": 0.03158149539338733, "acc_norm": 0.783625730994152, "acc_norm_stderr": 0.03158149539338733 }, "harness|truthfulqa:mc|0": { "mc1": 0.5716034271725826, "mc1_stderr": 0.017323088597314747, "mc2": 0.7185061667944077, "mc2_stderr": 0.015014851042298718 }, "harness|winogrande|5": { "acc": 0.8334648776637726, "acc_stderr": 0.010470796496781091 }, "harness|gsm8k|5": { "acc": 0.6542835481425322, "acc_stderr": 0.013100422990441573 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_DopeorNope__SOLARC-M-10.7B
[ "region:us" ]
2024-01-04T12:34:57+00:00
{"pretty_name": "Evaluation run of DopeorNope/SOLARC-M-10.7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [DopeorNope/SOLARC-M-10.7B](https://huggingface.co/DopeorNope/SOLARC-M-10.7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_DopeorNope__SOLARC-M-10.7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:32:38.431063](https://huggingface.co/datasets/open-llm-leaderboard/details_DopeorNope__SOLARC-M-10.7B/blob/main/results_2024-01-04T12-32-38.431063.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6669558592994752,\n \"acc_stderr\": 0.03159525026454693,\n \"acc_norm\": 0.667691393491765,\n \"acc_norm_stderr\": 0.03223875437522202,\n \"mc1\": 0.5716034271725826,\n \"mc1_stderr\": 0.017323088597314747,\n \"mc2\": 0.7185061667944077,\n \"mc2_stderr\": 0.015014851042298718\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6851535836177475,\n \"acc_stderr\": 0.01357265770308495,\n \"acc_norm\": 0.71160409556314,\n \"acc_norm_stderr\": 0.013238394422428173\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7133041226847242,\n \"acc_stderr\": 0.004512940497462742,\n \"acc_norm\": 0.8840868352917746,\n \"acc_norm_stderr\": 0.003194665266078602\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.756578947368421,\n \"acc_stderr\": 0.034923496688842384,\n \"acc_norm\": 0.756578947368421,\n \"acc_norm_stderr\": 0.034923496688842384\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.02863723563980089,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.02863723563980089\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 
0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956913,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956913\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.625531914893617,\n \"acc_stderr\": 0.03163910665367291,\n \"acc_norm\": 0.625531914893617,\n \"acc_norm_stderr\": 0.03163910665367291\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6275862068965518,\n \"acc_stderr\": 0.04028731532947558,\n \"acc_norm\": 0.6275862068965518,\n \"acc_norm_stderr\": 0.04028731532947558\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4947089947089947,\n \"acc_stderr\": 0.02574986828855657,\n \"acc_norm\": 0.4947089947089947,\n \"acc_norm_stderr\": 0.02574986828855657\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8193548387096774,\n \"acc_stderr\": 0.021886178567172534,\n \"acc_norm\": 0.8193548387096774,\n \"acc_norm_stderr\": 0.021886178567172534\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8121212121212121,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.8121212121212121,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8686868686868687,\n \"acc_stderr\": 0.024063156416822516,\n \"acc_norm\": 0.8686868686868687,\n \"acc_norm_stderr\": 0.024063156416822516\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603348,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603348\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": 
{\n \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.023946724741563976,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563976\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.36666666666666664,\n \"acc_stderr\": 0.029381620726465073,\n \"acc_norm\": 0.36666666666666664,\n \"acc_norm_stderr\": 0.029381620726465073\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.029344572500634332,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.029344572500634332\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.03958027231121569,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.03958027231121569\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374308,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374308\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5787037037037037,\n \"acc_stderr\": 0.033674621388960775,\n \"acc_norm\": 0.5787037037037037,\n \"acc_norm_stderr\": 0.033674621388960775\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8578431372549019,\n \"acc_stderr\": 0.02450980392156862,\n \"acc_norm\": 0.8578431372549019,\n \"acc_norm_stderr\": 0.02450980392156862\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8481012658227848,\n \"acc_stderr\": 0.023363878096632446,\n \"acc_norm\": 0.8481012658227848,\n \"acc_norm_stderr\": 0.023363878096632446\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596915,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596915\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.033519538795212696,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.033519538795212696\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.03492606476623791,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.03492606476623791\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.0230866350868414,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.0230866350868414\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8033205619412516,\n \"acc_stderr\": 
0.014214138556913917,\n \"acc_norm\": 0.8033205619412516,\n \"acc_norm_stderr\": 0.014214138556913917\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7572254335260116,\n \"acc_stderr\": 0.023083658586984204,\n \"acc_norm\": 0.7572254335260116,\n \"acc_norm_stderr\": 0.023083658586984204\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.39329608938547483,\n \"acc_stderr\": 0.016337268694270105,\n \"acc_norm\": 0.39329608938547483,\n \"acc_norm_stderr\": 0.016337268694270105\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7581699346405228,\n \"acc_stderr\": 0.024518195641879334,\n \"acc_norm\": 0.7581699346405228,\n \"acc_norm_stderr\": 0.024518195641879334\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.729903536977492,\n \"acc_stderr\": 0.02521804037341062,\n \"acc_norm\": 0.729903536977492,\n \"acc_norm_stderr\": 0.02521804037341062\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0227797190887334,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0227797190887334\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4934810951760104,\n \"acc_stderr\": 0.012769150688867503,\n \"acc_norm\": 0.4934810951760104,\n \"acc_norm_stderr\": 0.012769150688867503\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7389705882352942,\n \"acc_stderr\": 0.026679252270103128,\n \"acc_norm\": 0.7389705882352942,\n \"acc_norm_stderr\": 0.026679252270103128\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6797385620915033,\n \"acc_stderr\": 0.018875682938069446,\n \"acc_norm\": 0.6797385620915033,\n \"acc_norm_stderr\": 0.018875682938069446\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142783,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142783\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776334,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776334\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5843373493975904,\n \"acc_stderr\": 0.03836722176598053,\n \"acc_norm\": 0.5843373493975904,\n \"acc_norm_stderr\": 0.03836722176598053\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.783625730994152,\n \"acc_stderr\": 0.03158149539338733,\n \"acc_norm\": 0.783625730994152,\n \"acc_norm_stderr\": 0.03158149539338733\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5716034271725826,\n \"mc1_stderr\": 0.017323088597314747,\n \"mc2\": 0.7185061667944077,\n \"mc2_stderr\": 0.015014851042298718\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8334648776637726,\n \"acc_stderr\": 0.010470796496781091\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6542835481425322,\n \"acc_stderr\": 0.013100422990441573\n }\n}\n```", "repo_url": 
"https://huggingface.co/DopeorNope/SOLARC-M-10.7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-32-38.431063.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-32-38.431063.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-32-38.431063.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-32-38.431063.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-32-38.431063.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_32_38.431063", "path": ["**/details_harness|winogrande|5_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-32-38.431063.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T12_32_38.431063", "path": ["results_2024-01-04T12-32-38.431063.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T12-32-38.431063.parquet"]}]}]}
2024-01-04T12:35:21+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of DopeorNope/SOLARC-M-10.7B Dataset automatically created during the evaluation run of model DopeorNope/SOLARC-M-10.7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:32:38.431063 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases, and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
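The card text above ends its loading instructions at "do the following:" without the snippet itself (it was dropped from this plain-text rendering). A minimal sketch of that step, assuming the details repository follows the Open LLM Leaderboard naming convention (`open-llm-leaderboard/details_DopeorNope__SOLARC-M-10.7B`) and using one of the config names listed in this record's metadata:

```python
from datasets import load_dataset

# Sketch only: the repo id and config name follow the Open LLM Leaderboard
# details-dataset convention; adjust them if the repository differs.
data = load_dataset(
    "open-llm-leaderboard/details_DopeorNope__SOLARC-M-10.7B",
    "harness_winogrande_5",  # any config_name from the metadata above works
    split="train",           # per the card, "train" points to the latest run;
                             # the metadata also exposes a "latest" split
)
print(data)
```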
[ "# Dataset Card for Evaluation run of DopeorNope/SOLARC-M-10.7B\n\n\n\nDataset automatically created during the evaluation run of model DopeorNope/SOLARC-M-10.7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:32:38.431063(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of DopeorNope/SOLARC-M-10.7B\n\n\n\nDataset automatically created during the evaluation run of model DopeorNope/SOLARC-M-10.7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:32:38.431063(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 191, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of DopeorNope/SOLARC-M-10.7B\n\n\n\nDataset automatically created during the evaluation run of model DopeorNope/SOLARC-M-10.7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:32:38.431063(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
8c330900434ed21735caf7244dca533cc8e8b76e
# Dataset Card for Evaluation run of cookinai/CatMacaroni14 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [cookinai/CatMacaroni14](https://huggingface.co/cookinai/CatMacaroni14) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_cookinai__CatMacaroni14", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:34:22.908525](https://huggingface.co/datasets/open-llm-leaderboard/details_cookinai__CatMacaroni14/blob/main/results_2024-01-04T12-34-22.908525.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6552408787208266, "acc_stderr": 0.032020335411090985, "acc_norm": 0.6550578143115945, "acc_norm_stderr": 0.032683585526071146, "mc1": 0.44920440636474906, "mc1_stderr": 0.017412941986115305, "mc2": 0.6158322886811354, "mc2_stderr": 0.015113986765130786 }, "harness|arc:challenge|25": { "acc": 0.6646757679180887, "acc_stderr": 0.01379618294778556, "acc_norm": 0.6911262798634812, "acc_norm_stderr": 0.013501770929344003 }, "harness|hellaswag|10": { "acc": 0.6831308504282015, "acc_stderr": 0.004643050902503913, "acc_norm": 0.8692491535550687, "acc_norm_stderr": 0.003364386713542236 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6666666666666666, "acc_stderr": 0.04072314811876837, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04072314811876837 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6710526315789473, "acc_stderr": 0.038234289699266046, "acc_norm": 0.6710526315789473, "acc_norm_stderr": 0.038234289699266046 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.04852365870939099, "acc_norm": 0.63, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7169811320754716, "acc_stderr": 0.027724236492700914, "acc_norm": 0.7169811320754716, "acc_norm_stderr": 0.027724236492700914 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7847222222222222, "acc_stderr": 0.03437079344106135, "acc_norm": 0.7847222222222222, "acc_norm_stderr": 0.03437079344106135 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 
0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6763005780346821, "acc_stderr": 0.035676037996391706, "acc_norm": 0.6763005780346821, "acc_norm_stderr": 0.035676037996391706 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287534, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287534 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768077, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768077 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5829787234042553, "acc_stderr": 0.03223276266711712, "acc_norm": 0.5829787234042553, "acc_norm_stderr": 0.03223276266711712 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555498, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555498 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.42857142857142855, "acc_stderr": 0.025487187147859375, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.025487187147859375 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7806451612903226, "acc_stderr": 0.023540799358723295, "acc_norm": 0.7806451612903226, "acc_norm_stderr": 0.023540799358723295 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4975369458128079, "acc_stderr": 0.03517945038691063, "acc_norm": 0.4975369458128079, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.04560480215720684, "acc_norm": 0.71, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.028869778460267045, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.028869778460267045 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.021995311963644237, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.021995311963644237 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402534, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402534 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37407407407407406, "acc_stderr": 0.02950286112895529, "acc_norm": 0.37407407407407406, "acc_norm_stderr": 0.02950286112895529 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.680672268907563, "acc_stderr": 0.030283995525884396, "acc_norm": 0.680672268907563, "acc_norm_stderr": 0.030283995525884396 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.03822746937658752, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 
0.03822746937658752 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5138888888888888, "acc_stderr": 0.034086558679777494, "acc_norm": 0.5138888888888888, "acc_norm_stderr": 0.034086558679777494 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.025845017986926917, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.025845017986926917 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290902, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290902 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6816143497757847, "acc_stderr": 0.03126580522513713, "acc_norm": 0.6816143497757847, "acc_norm_stderr": 0.03126580522513713 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159464, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159464 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098822, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098822 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7730061349693251, "acc_stderr": 0.03291099578615769, "acc_norm": 0.7730061349693251, "acc_norm_stderr": 0.03291099578615769 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4375, "acc_stderr": 0.04708567521880525, "acc_norm": 0.4375, "acc_norm_stderr": 0.04708567521880525 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8365261813537676, "acc_stderr": 0.013223928616741624, "acc_norm": 0.8365261813537676, "acc_norm_stderr": 0.013223928616741624 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7485549132947977, "acc_stderr": 0.02335736578587403, "acc_norm": 0.7485549132947977, "acc_norm_stderr": 0.02335736578587403 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.423463687150838, "acc_stderr": 0.016525425898773496, "acc_norm": 0.423463687150838, "acc_norm_stderr": 0.016525425898773496 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.025646863097137897, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.025646863097137897 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7138263665594855, "acc_stderr": 0.025670259242188933, "acc_norm": 0.7138263665594855, "acc_norm_stderr": 0.025670259242188933 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7592592592592593, "acc_stderr": 0.02378858355165854, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.02378858355165854 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.48226950354609927, "acc_stderr": 0.02980873964223777, 
"acc_norm": 0.48226950354609927, "acc_norm_stderr": 0.02980873964223777 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46740547588005216, "acc_stderr": 0.012743072942653345, "acc_norm": 0.46740547588005216, "acc_norm_stderr": 0.012743072942653345 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6838235294117647, "acc_stderr": 0.02824568739146292, "acc_norm": 0.6838235294117647, "acc_norm_stderr": 0.02824568739146292 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6895424836601307, "acc_stderr": 0.018718067052623234, "acc_norm": 0.6895424836601307, "acc_norm_stderr": 0.018718067052623234 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8557213930348259, "acc_stderr": 0.024845753212306053, "acc_norm": 0.8557213930348259, "acc_norm_stderr": 0.024845753212306053 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.03487350880197769, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197769 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.44920440636474906, "mc1_stderr": 0.017412941986115305, "mc2": 0.6158322886811354, "mc2_stderr": 0.015113986765130786 }, "harness|winogrande|5": { "acc": 0.8105761641673244, "acc_stderr": 0.011012790432989247 }, "harness|gsm8k|5": { "acc": 0.7232752084912812, "acc_stderr": 0.012323047397959792 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
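As a usage note, the aggregated metrics listed in the results block above can also be retrieved programmatically through the "results" configuration of this dataset rather than read out of the JSON by hand. The snippet below is a minimal sketch, assuming the `datasets` library is installed and that the "results" configuration and its "latest" split exist as described in this card; the exact layout of the returned rows is not specified here, so the final print is only for inspection.

```python
from datasets import load_dataset

# Aggregated metrics for the run (the "results" configuration described in this card).
# The "latest" split always points to the most recent evaluation run.
results = load_dataset(
    "open-llm-leaderboard/details_cookinai__CatMacaroni14",
    "results",
    split="latest",
)

# Inspect the first row of aggregated results.
print(results[0])
```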
open-llm-leaderboard/details_cookinai__CatMacaroni14
[ "region:us" ]
2024-01-04T12:36:42+00:00
{"pretty_name": "Evaluation run of cookinai/CatMacaroni14", "dataset_summary": "Dataset automatically created during the evaluation run of model [cookinai/CatMacaroni14](https://huggingface.co/cookinai/CatMacaroni14) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_cookinai__CatMacaroni14\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:34:22.908525](https://huggingface.co/datasets/open-llm-leaderboard/details_cookinai__CatMacaroni14/blob/main/results_2024-01-04T12-34-22.908525.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6552408787208266,\n \"acc_stderr\": 0.032020335411090985,\n \"acc_norm\": 0.6550578143115945,\n \"acc_norm_stderr\": 0.032683585526071146,\n \"mc1\": 0.44920440636474906,\n \"mc1_stderr\": 0.017412941986115305,\n \"mc2\": 0.6158322886811354,\n \"mc2_stderr\": 0.015113986765130786\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6646757679180887,\n \"acc_stderr\": 0.01379618294778556,\n \"acc_norm\": 0.6911262798634812,\n \"acc_norm_stderr\": 0.013501770929344003\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6831308504282015,\n \"acc_stderr\": 0.004643050902503913,\n \"acc_norm\": 0.8692491535550687,\n \"acc_norm_stderr\": 0.003364386713542236\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.04072314811876837,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.04072314811876837\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6710526315789473,\n \"acc_stderr\": 0.038234289699266046,\n \"acc_norm\": 0.6710526315789473,\n \"acc_norm_stderr\": 0.038234289699266046\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7169811320754716,\n \"acc_stderr\": 0.027724236492700914,\n \"acc_norm\": 0.7169811320754716,\n \"acc_norm_stderr\": 0.027724236492700914\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7847222222222222,\n \"acc_stderr\": 0.03437079344106135,\n \"acc_norm\": 0.7847222222222222,\n \"acc_norm_stderr\": 0.03437079344106135\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 
0.45,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6763005780346821,\n \"acc_stderr\": 0.035676037996391706,\n \"acc_norm\": 0.6763005780346821,\n \"acc_norm_stderr\": 0.035676037996391706\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287534,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287534\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768077,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768077\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5829787234042553,\n \"acc_stderr\": 0.03223276266711712,\n \"acc_norm\": 0.5829787234042553,\n \"acc_norm_stderr\": 0.03223276266711712\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555498,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555498\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.025487187147859375,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.025487187147859375\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7806451612903226,\n \"acc_stderr\": 0.023540799358723295,\n \"acc_norm\": 0.7806451612903226,\n \"acc_norm_stderr\": 0.023540799358723295\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.028869778460267045,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.028869778460267045\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.021995311963644237,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.021995311963644237\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 
0.023901157979402534,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402534\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37407407407407406,\n \"acc_stderr\": 0.02950286112895529,\n \"acc_norm\": 0.37407407407407406,\n \"acc_norm_stderr\": 0.02950286112895529\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.680672268907563,\n \"acc_stderr\": 0.030283995525884396,\n \"acc_norm\": 0.680672268907563,\n \"acc_norm_stderr\": 0.030283995525884396\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.32450331125827814,\n \"acc_stderr\": 0.03822746937658752,\n \"acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.03822746937658752\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5138888888888888,\n \"acc_stderr\": 0.034086558679777494,\n \"acc_norm\": 0.5138888888888888,\n \"acc_norm_stderr\": 0.034086558679777494\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.025845017986926917,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.025845017986926917\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290902,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290902\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159464,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159464\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098822,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098822\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7730061349693251,\n \"acc_stderr\": 0.03291099578615769,\n \"acc_norm\": 0.7730061349693251,\n \"acc_norm_stderr\": 0.03291099578615769\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8365261813537676,\n \"acc_stderr\": 0.013223928616741624,\n \"acc_norm\": 0.8365261813537676,\n \"acc_norm_stderr\": 
0.013223928616741624\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7485549132947977,\n \"acc_stderr\": 0.02335736578587403,\n \"acc_norm\": 0.7485549132947977,\n \"acc_norm_stderr\": 0.02335736578587403\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.423463687150838,\n \"acc_stderr\": 0.016525425898773496,\n \"acc_norm\": 0.423463687150838,\n \"acc_norm_stderr\": 0.016525425898773496\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137897,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137897\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7138263665594855,\n \"acc_stderr\": 0.025670259242188933,\n \"acc_norm\": 0.7138263665594855,\n \"acc_norm_stderr\": 0.025670259242188933\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.02378858355165854,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.02378858355165854\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48226950354609927,\n \"acc_stderr\": 0.02980873964223777,\n \"acc_norm\": 0.48226950354609927,\n \"acc_norm_stderr\": 0.02980873964223777\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46740547588005216,\n \"acc_stderr\": 0.012743072942653345,\n \"acc_norm\": 0.46740547588005216,\n \"acc_norm_stderr\": 0.012743072942653345\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6838235294117647,\n \"acc_stderr\": 0.02824568739146292,\n \"acc_norm\": 0.6838235294117647,\n \"acc_norm_stderr\": 0.02824568739146292\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6895424836601307,\n \"acc_stderr\": 0.018718067052623234,\n \"acc_norm\": 0.6895424836601307,\n \"acc_norm_stderr\": 0.018718067052623234\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8557213930348259,\n \"acc_stderr\": 0.024845753212306053,\n \"acc_norm\": 0.8557213930348259,\n \"acc_norm_stderr\": 0.024845753212306053\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197769,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197769\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.44920440636474906,\n \"mc1_stderr\": 0.017412941986115305,\n \"mc2\": 0.6158322886811354,\n \"mc2_stderr\": 0.015113986765130786\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8105761641673244,\n \"acc_stderr\": 0.011012790432989247\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7232752084912812,\n \"acc_stderr\": 0.012323047397959792\n }\n}\n```", "repo_url": "https://huggingface.co/cookinai/CatMacaroni14", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-34-22.908525.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-34-22.908525.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-34-22.908525.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-34-22.908525.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-34-22.908525.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-34-22.908525.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["**/details_harness|winogrande|5_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-34-22.908525.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T12_34_22.908525", "path": ["results_2024-01-04T12-34-22.908525.parquet"]}, {"split": "latest", "path": 
["results_2024-01-04T12-34-22.908525.parquet"]}]}]}
2024-01-04T12:37:06+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of cookinai/CatMacaroni14 Dataset automatically created during the evaluation run of model cookinai/CatMacaroni14 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:34:22.908525 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of cookinai/CatMacaroni14\n\n\n\nDataset automatically created during the evaluation run of model cookinai/CatMacaroni14 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:34:22.908525(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of cookinai/CatMacaroni14\n\n\n\nDataset automatically created during the evaluation run of model cookinai/CatMacaroni14 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:34:22.908525(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 177, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of cookinai/CatMacaroni14\n\n\n\nDataset automatically created during the evaluation run of model cookinai/CatMacaroni14 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:34:22.908525(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
6857e45a9a619217318846cf982aa68fe1c689a4
# Dataset Card for Evaluation run of instructkr/ko-wand-136M

<!-- Provide a quick summary of the dataset. -->

Dataset automatically created during the evaluation run of model [instructkr/ko-wand-136M](https://huggingface.co/instructkr/ko-wand-136M) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).

To load the details from a run, you can for instance do the following:
```python
from datasets import load_dataset
data = load_dataset("open-llm-leaderboard/details_instructkr__ko-wand-136M",
	"harness_winogrande_5",
	split="train")
```

## Latest results

These are the [latest results from run 2024-01-04T12:38:55.443031](https://huggingface.co/datasets/open-llm-leaderboard/details_instructkr__ko-wand-136M/blob/main/results_2024-01-04T12-38-55.443031.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

```python
{ "all": { "acc": 0.23562400220828256, "acc_stderr": 0.030107417401823856, "acc_norm": 0.2356441730715218, "acc_norm_stderr": 0.030900545686268644, "mc1": 0.25458996328029376, "mc1_stderr": 0.015250117079156479, "mc2": 0.5068400671997387, "mc2_stderr": 0.01591053504901188 }, "harness|arc:challenge|25": { "acc": 0.1825938566552901, "acc_stderr": 0.011289730684564982, "acc_norm": 0.21331058020477817, "acc_norm_stderr": 0.011970971742326334 }, "harness|hellaswag|10": { "acc": 0.2591117307309301, "acc_stderr": 0.0043725160601647516, "acc_norm": 0.2500497908783111, "acc_norm_stderr": 0.004321564303822428 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2222222222222222, "acc_stderr": 0.035914440841969694, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.035914440841969694 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.21052631578947367, "acc_stderr": 0.033176727875331574, "acc_norm": 0.21052631578947367, "acc_norm_stderr": 0.033176727875331574 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.21509433962264152, "acc_stderr": 0.025288394502891366, "acc_norm": 0.21509433962264152, "acc_norm_stderr": 0.025288394502891366 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2847222222222222, "acc_stderr": 0.03773809990686934, "acc_norm": 0.2847222222222222, "acc_norm_stderr": 0.03773809990686934 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.2, "acc_stderr": 0.04020151261036846, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2138728323699422, "acc_stderr": 0.03126511206173044, "acc_norm": 0.2138728323699422, "acc_norm_stderr": 0.03126511206173044 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.28431372549019607, "acc_stderr": 0.04488482852329017, "acc_norm": 0.28431372549019607, "acc_norm_stderr": 0.04488482852329017 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.16, "acc_stderr": 0.0368452949177471, "acc_norm": 0.16, "acc_norm_stderr": 0.0368452949177471 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2553191489361702, "acc_stderr": 0.0285048564705142, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.0285048564705142 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.040969851398436695, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.040969851398436695 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.25517241379310346, "acc_stderr": 0.03632984052707842, "acc_norm": 0.25517241379310346, "acc_norm_stderr": 0.03632984052707842 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.21693121693121692, "acc_stderr": 0.021227082449445055, "acc_norm": 0.21693121693121692, "acc_norm_stderr": 0.021227082449445055 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.23809523809523808, "acc_stderr": 0.038095238095238106, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.038095238095238106 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.16, "acc_stderr": 0.03684529491774709, "acc_norm": 0.16, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.18387096774193548, "acc_stderr": 0.02203721734026784, "acc_norm": 0.18387096774193548, "acc_norm_stderr": 0.02203721734026784 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.21674876847290642, "acc_stderr": 0.028990331252516235, "acc_norm": 0.21674876847290642, "acc_norm_stderr": 0.028990331252516235 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2545454545454545, "acc_stderr": 0.03401506715249039, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.03401506715249039 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.21212121212121213, "acc_stderr": 0.029126522834586825, "acc_norm": 0.21212121212121213, "acc_norm_stderr": 0.029126522834586825 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.24870466321243523, "acc_stderr": 0.031195840877700304, "acc_norm": 0.24870466321243523, "acc_norm_stderr": 0.031195840877700304 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.28717948717948716, "acc_stderr": 0.02293992541853061, "acc_norm": 0.28717948717948716, "acc_norm_stderr": 0.02293992541853061 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.02671924078371216, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.02671924078371216 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.25210084033613445, "acc_stderr": 0.02820554503327772, "acc_norm": 0.25210084033613445, "acc_norm_stderr": 0.02820554503327772 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.17880794701986755, 
"acc_stderr": 0.031287448506007245, "acc_norm": 0.17880794701986755, "acc_norm_stderr": 0.031287448506007245 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.23119266055045873, "acc_stderr": 0.01807575024163315, "acc_norm": 0.23119266055045873, "acc_norm_stderr": 0.01807575024163315 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.21296296296296297, "acc_stderr": 0.027920963147993662, "acc_norm": 0.21296296296296297, "acc_norm_stderr": 0.027920963147993662 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.27941176470588236, "acc_stderr": 0.031493281045079556, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.031493281045079556 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.25738396624472576, "acc_stderr": 0.028458820991460288, "acc_norm": 0.25738396624472576, "acc_norm_stderr": 0.028458820991460288 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.2062780269058296, "acc_stderr": 0.027157150479563824, "acc_norm": 0.2062780269058296, "acc_norm_stderr": 0.027157150479563824 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.22900763358778625, "acc_stderr": 0.036853466317118506, "acc_norm": 0.22900763358778625, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2644628099173554, "acc_stderr": 0.04026187527591204, "acc_norm": 0.2644628099173554, "acc_norm_stderr": 0.04026187527591204 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2085889570552147, "acc_stderr": 0.03192193448934723, "acc_norm": 0.2085889570552147, "acc_norm_stderr": 0.03192193448934723 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.2767857142857143, "acc_stderr": 0.042466243366976256, "acc_norm": 0.2767857142857143, "acc_norm_stderr": 0.042466243366976256 }, "harness|hendrycksTest-management|5": { "acc": 0.32038834951456313, "acc_stderr": 0.046202840822800406, "acc_norm": 0.32038834951456313, "acc_norm_stderr": 0.046202840822800406 }, "harness|hendrycksTest-marketing|5": { "acc": 0.2948717948717949, "acc_stderr": 0.029872577708891155, "acc_norm": 0.2948717948717949, "acc_norm_stderr": 0.029872577708891155 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.2, "acc_stderr": 0.04020151261036843, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036843 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.21583652618135377, "acc_stderr": 0.014711684386139935, "acc_norm": 0.21583652618135377, "acc_norm_stderr": 0.014711684386139935 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.23121387283236994, "acc_stderr": 0.022698657167855716, "acc_norm": 0.23121387283236994, "acc_norm_stderr": 0.022698657167855716 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2659217877094972, "acc_stderr": 0.01477676506643889, "acc_norm": 0.2659217877094972, "acc_norm_stderr": 0.01477676506643889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.21241830065359477, "acc_stderr": 0.02342037547829613, "acc_norm": 0.21241830065359477, "acc_norm_stderr": 0.02342037547829613 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.18971061093247588, "acc_stderr": 0.022268196258783225, "acc_norm": 0.18971061093247588, "acc_norm_stderr": 0.022268196258783225 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.19753086419753085, "acc_stderr": 0.022152889927898947, "acc_norm": 0.19753086419753085, "acc_norm_stderr": 0.022152889927898947 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.20212765957446807, "acc_stderr": 0.023956668237850247, "acc_norm": 0.20212765957446807, "acc_norm_stderr": 0.023956668237850247 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2470664928292047, "acc_stderr": 0.011015752255279338, "acc_norm": 0.2470664928292047, "acc_norm_stderr": 0.011015752255279338 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.1948529411764706, "acc_stderr": 0.024060599423487424, "acc_norm": 0.1948529411764706, "acc_norm_stderr": 0.024060599423487424 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.26633986928104575, "acc_stderr": 0.017883188134667192, "acc_norm": 0.26633986928104575, "acc_norm_stderr": 0.017883188134667192 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03955932861795833, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03955932861795833 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.22448979591836735, "acc_stderr": 0.02671143055553841, "acc_norm": 0.22448979591836735, "acc_norm_stderr": 0.02671143055553841 }, "harness|hendrycksTest-sociology|5": { "acc": 0.26865671641791045, "acc_stderr": 0.03134328358208954, "acc_norm": 0.26865671641791045, "acc_norm_stderr": 0.03134328358208954 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.28, "acc_stderr": 0.04512608598542129, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542129 }, "harness|hendrycksTest-virology|5": { "acc": 0.24096385542168675, "acc_stderr": 0.033293941190735296, "acc_norm": 0.24096385542168675, "acc_norm_stderr": 0.033293941190735296 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.26900584795321636, "acc_stderr": 0.0340105262010409, "acc_norm": 0.26900584795321636, "acc_norm_stderr": 0.0340105262010409 }, "harness|truthfulqa:mc|0": { "mc1": 0.25458996328029376, "mc1_stderr": 0.015250117079156479, "mc2": 0.5068400671997387, "mc2_stderr": 0.01591053504901188 }, "harness|winogrande|5": { "acc": 0.49171270718232046, "acc_stderr": 0.014050555322824192 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_instructkr__ko-wand-136M
[ "region:us" ]
2024-01-04T12:41:21+00:00
{"pretty_name": "Evaluation run of instructkr/ko-wand-136M", "dataset_summary": "Dataset automatically created during the evaluation run of model [instructkr/ko-wand-136M](https://huggingface.co/instructkr/ko-wand-136M) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_instructkr__ko-wand-136M\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:38:55.443031](https://huggingface.co/datasets/open-llm-leaderboard/details_instructkr__ko-wand-136M/blob/main/results_2024-01-04T12-38-55.443031.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.23562400220828256,\n \"acc_stderr\": 0.030107417401823856,\n \"acc_norm\": 0.2356441730715218,\n \"acc_norm_stderr\": 0.030900545686268644,\n \"mc1\": 0.25458996328029376,\n \"mc1_stderr\": 0.015250117079156479,\n \"mc2\": 0.5068400671997387,\n \"mc2_stderr\": 0.01591053504901188\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.1825938566552901,\n \"acc_stderr\": 0.011289730684564982,\n \"acc_norm\": 0.21331058020477817,\n \"acc_norm_stderr\": 0.011970971742326334\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2591117307309301,\n \"acc_stderr\": 0.0043725160601647516,\n \"acc_norm\": 0.2500497908783111,\n \"acc_norm_stderr\": 0.004321564303822428\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.035914440841969694,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.035914440841969694\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.21052631578947367,\n \"acc_stderr\": 0.033176727875331574,\n \"acc_norm\": 0.21052631578947367,\n \"acc_norm_stderr\": 0.033176727875331574\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.21509433962264152,\n \"acc_stderr\": 0.025288394502891366,\n \"acc_norm\": 0.21509433962264152,\n \"acc_norm_stderr\": 0.025288394502891366\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2847222222222222,\n \"acc_stderr\": 0.03773809990686934,\n \"acc_norm\": 0.2847222222222222,\n \"acc_norm_stderr\": 0.03773809990686934\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 
0.04020151261036846,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2138728323699422,\n \"acc_stderr\": 0.03126511206173044,\n \"acc_norm\": 0.2138728323699422,\n \"acc_norm_stderr\": 0.03126511206173044\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.28431372549019607,\n \"acc_stderr\": 0.04488482852329017,\n \"acc_norm\": 0.28431372549019607,\n \"acc_norm_stderr\": 0.04488482852329017\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.16,\n \"acc_stderr\": 0.0368452949177471,\n \"acc_norm\": 0.16,\n \"acc_norm_stderr\": 0.0368452949177471\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2553191489361702,\n \"acc_stderr\": 0.0285048564705142,\n \"acc_norm\": 0.2553191489361702,\n \"acc_norm_stderr\": 0.0285048564705142\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2543859649122807,\n \"acc_stderr\": 0.040969851398436695,\n \"acc_norm\": 0.2543859649122807,\n \"acc_norm_stderr\": 0.040969851398436695\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.25517241379310346,\n \"acc_stderr\": 0.03632984052707842,\n \"acc_norm\": 0.25517241379310346,\n \"acc_norm_stderr\": 0.03632984052707842\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.21693121693121692,\n \"acc_stderr\": 0.021227082449445055,\n \"acc_norm\": 0.21693121693121692,\n \"acc_norm_stderr\": 0.021227082449445055\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.23809523809523808,\n \"acc_stderr\": 0.038095238095238106,\n \"acc_norm\": 0.23809523809523808,\n \"acc_norm_stderr\": 0.038095238095238106\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.16,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.16,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.18387096774193548,\n \"acc_stderr\": 0.02203721734026784,\n \"acc_norm\": 0.18387096774193548,\n \"acc_norm_stderr\": 0.02203721734026784\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.21674876847290642,\n \"acc_stderr\": 0.028990331252516235,\n \"acc_norm\": 0.21674876847290642,\n \"acc_norm_stderr\": 0.028990331252516235\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932269,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932269\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2545454545454545,\n \"acc_stderr\": 0.03401506715249039,\n \"acc_norm\": 0.2545454545454545,\n \"acc_norm_stderr\": 0.03401506715249039\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.21212121212121213,\n \"acc_stderr\": 0.029126522834586825,\n \"acc_norm\": 0.21212121212121213,\n \"acc_norm_stderr\": 0.029126522834586825\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.24870466321243523,\n \"acc_stderr\": 0.031195840877700304,\n \"acc_norm\": 0.24870466321243523,\n \"acc_norm_stderr\": 0.031195840877700304\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.28717948717948716,\n \"acc_stderr\": 0.02293992541853061,\n \"acc_norm\": 0.28717948717948716,\n \"acc_norm_stderr\": 0.02293992541853061\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.02671924078371216,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.02671924078371216\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.25210084033613445,\n \"acc_stderr\": 0.02820554503327772,\n \"acc_norm\": 0.25210084033613445,\n \"acc_norm_stderr\": 0.02820554503327772\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.17880794701986755,\n \"acc_stderr\": 0.031287448506007245,\n \"acc_norm\": 0.17880794701986755,\n \"acc_norm_stderr\": 0.031287448506007245\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.23119266055045873,\n \"acc_stderr\": 0.01807575024163315,\n \"acc_norm\": 0.23119266055045873,\n \"acc_norm_stderr\": 0.01807575024163315\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.21296296296296297,\n \"acc_stderr\": 0.027920963147993662,\n \"acc_norm\": 0.21296296296296297,\n \"acc_norm_stderr\": 0.027920963147993662\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.27941176470588236,\n \"acc_stderr\": 0.031493281045079556,\n \"acc_norm\": 0.27941176470588236,\n \"acc_norm_stderr\": 0.031493281045079556\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.25738396624472576,\n \"acc_stderr\": 0.028458820991460288,\n \"acc_norm\": 0.25738396624472576,\n \"acc_norm_stderr\": 0.028458820991460288\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.2062780269058296,\n \"acc_stderr\": 0.027157150479563824,\n \"acc_norm\": 0.2062780269058296,\n \"acc_norm_stderr\": 0.027157150479563824\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.22900763358778625,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.22900763358778625,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2644628099173554,\n \"acc_stderr\": 0.04026187527591204,\n \"acc_norm\": 0.2644628099173554,\n \"acc_norm_stderr\": 0.04026187527591204\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2085889570552147,\n \"acc_stderr\": 0.03192193448934723,\n \"acc_norm\": 0.2085889570552147,\n \"acc_norm_stderr\": 0.03192193448934723\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.2767857142857143,\n \"acc_stderr\": 0.042466243366976256,\n \"acc_norm\": 0.2767857142857143,\n \"acc_norm_stderr\": 0.042466243366976256\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.32038834951456313,\n \"acc_stderr\": 0.046202840822800406,\n \"acc_norm\": 0.32038834951456313,\n \"acc_norm_stderr\": 0.046202840822800406\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.2948717948717949,\n \"acc_stderr\": 0.029872577708891155,\n \"acc_norm\": 0.2948717948717949,\n \"acc_norm_stderr\": 0.029872577708891155\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036843,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036843\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.21583652618135377,\n \"acc_stderr\": 0.014711684386139935,\n \"acc_norm\": 0.21583652618135377,\n \"acc_norm_stderr\": 0.014711684386139935\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.23121387283236994,\n \"acc_stderr\": 0.022698657167855716,\n \"acc_norm\": 0.23121387283236994,\n \"acc_norm_stderr\": 0.022698657167855716\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2659217877094972,\n \"acc_stderr\": 0.01477676506643889,\n \"acc_norm\": 0.2659217877094972,\n \"acc_norm_stderr\": 0.01477676506643889\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.21241830065359477,\n \"acc_stderr\": 0.02342037547829613,\n \"acc_norm\": 0.21241830065359477,\n \"acc_norm_stderr\": 0.02342037547829613\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.18971061093247588,\n \"acc_stderr\": 0.022268196258783225,\n \"acc_norm\": 0.18971061093247588,\n \"acc_norm_stderr\": 0.022268196258783225\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.19753086419753085,\n \"acc_stderr\": 0.022152889927898947,\n \"acc_norm\": 0.19753086419753085,\n \"acc_norm_stderr\": 0.022152889927898947\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.20212765957446807,\n \"acc_stderr\": 0.023956668237850247,\n \"acc_norm\": 0.20212765957446807,\n \"acc_norm_stderr\": 0.023956668237850247\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2470664928292047,\n \"acc_stderr\": 0.011015752255279338,\n \"acc_norm\": 0.2470664928292047,\n \"acc_norm_stderr\": 0.011015752255279338\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.1948529411764706,\n \"acc_stderr\": 0.024060599423487424,\n \"acc_norm\": 0.1948529411764706,\n \"acc_norm_stderr\": 0.024060599423487424\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.26633986928104575,\n \"acc_stderr\": 0.017883188134667192,\n \"acc_norm\": 0.26633986928104575,\n \"acc_norm_stderr\": 0.017883188134667192\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03955932861795833,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03955932861795833\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.22448979591836735,\n \"acc_stderr\": 0.02671143055553841,\n \"acc_norm\": 0.22448979591836735,\n \"acc_norm_stderr\": 0.02671143055553841\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.26865671641791045,\n \"acc_stderr\": 0.03134328358208954,\n \"acc_norm\": 0.26865671641791045,\n \"acc_norm_stderr\": 0.03134328358208954\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542129,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542129\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.24096385542168675,\n \"acc_stderr\": 0.033293941190735296,\n \"acc_norm\": 0.24096385542168675,\n \"acc_norm_stderr\": 0.033293941190735296\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.26900584795321636,\n \"acc_stderr\": 0.0340105262010409,\n \"acc_norm\": 0.26900584795321636,\n \"acc_norm_stderr\": 0.0340105262010409\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.25458996328029376,\n \"mc1_stderr\": 0.015250117079156479,\n \"mc2\": 0.5068400671997387,\n \"mc2_stderr\": 0.01591053504901188\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.49171270718232046,\n \"acc_stderr\": 0.014050555322824192\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n 
}\n}\n```", "repo_url": "https://huggingface.co/instructkr/ko-wand-136M", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-38-55.443031.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-38-55.443031.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-38-55.443031.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-38-55.443031.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-38-55.443031.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_38_55.443031", "path": ["**/details_harness|winogrande|5_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-38-55.443031.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T12_38_55.443031", "path": ["results_2024-01-04T12-38-55.443031.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T12-38-55.443031.parquet"]}]}]}
2024-01-04T12:41:45+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of instructkr/ko-wand-136M Dataset automatically created during the evaluation run of model instructkr/ko-wand-136M on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:38:55.443031 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
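The loading snippet itself was stripped from the card text above ("do the following:" is left dangling), so here is a minimal sketch of what that call would look like, mirroring the pattern used by the other evaluation-details cards in this dump; the repository name `open-llm-leaderboard/details_instructkr__ko-wand-136M` and the `harness_winogrande_5` config are inferred from that naming pattern and are assumptions, not confirmed for this particular run.

```python
# Minimal sketch, assuming the details repo follows the leaderboard's
# "open-llm-leaderboard/details_<org>__<model>" naming convention.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_instructkr__ko-wand-136M",  # assumed repo name
    "harness_winogrande_5",  # one of the 63 per-task configurations
    split="train",  # "train" always points to the latest results
)
```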
[ "# Dataset Card for Evaluation run of instructkr/ko-wand-136M\n\n\n\nDataset automatically created during the evaluation run of model instructkr/ko-wand-136M on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:38:55.443031(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of instructkr/ko-wand-136M\n\n\n\nDataset automatically created during the evaluation run of model instructkr/ko-wand-136M on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:38:55.443031(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 181, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of instructkr/ko-wand-136M\n\n\n\nDataset automatically created during the evaluation run of model instructkr/ko-wand-136M on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:38:55.443031(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
5f001431ab6358ba126d3610cce482e5ff0f88a7
# Dataset Card for Evaluation run of kekmodel/StopCarbon-10.7B-v6 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [kekmodel/StopCarbon-10.7B-v6](https://huggingface.co/kekmodel/StopCarbon-10.7B-v6) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_kekmodel__StopCarbon-10.7B-v6", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:45:25.846372](https://huggingface.co/datasets/open-llm-leaderboard/details_kekmodel__StopCarbon-10.7B-v6/blob/main/results_2024-01-04T12-45-25.846372.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6667902726267952, "acc_stderr": 0.03164290381022226, "acc_norm": 0.6676653511210807, "acc_norm_stderr": 0.03228679656142636, "mc1": 0.572827417380661, "mc1_stderr": 0.017316834410963926, "mc2": 0.7196325985947845, "mc2_stderr": 0.014986891722432193 }, "harness|arc:challenge|25": { "acc": 0.6843003412969283, "acc_stderr": 0.013582571095815291, "acc_norm": 0.71160409556314, "acc_norm_stderr": 0.013238394422428173 }, "harness|hellaswag|10": { "acc": 0.714797849034057, "acc_stderr": 0.00450587908460684, "acc_norm": 0.8849830711013742, "acc_norm_stderr": 0.0031839033919416975 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.756578947368421, "acc_stderr": 0.034923496688842384, "acc_norm": 0.756578947368421, "acc_norm_stderr": 0.034923496688842384 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.73, "acc_stderr": 0.04461960433384741, "acc_norm": 0.73, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.02863723563980089, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.02863723563980089 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7847222222222222, "acc_stderr": 0.03437079344106135, "acc_norm": 0.7847222222222222, "acc_norm_stderr": 0.03437079344106135 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956913, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956913 },
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768077, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768077 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6212765957446809, "acc_stderr": 0.03170995606040655, "acc_norm": 0.6212765957446809, "acc_norm_stderr": 0.03170995606040655 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6344827586206897, "acc_stderr": 0.040131241954243856, "acc_norm": 0.6344827586206897, "acc_norm_stderr": 0.040131241954243856 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.5, "acc_stderr": 0.025751310131230234, "acc_norm": 0.5, "acc_norm_stderr": 0.025751310131230234 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8225806451612904, "acc_stderr": 0.021732540689329286, "acc_norm": 0.8225806451612904, "acc_norm_stderr": 0.021732540689329286 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.03517945038691063, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8121212121212121, "acc_stderr": 0.03050193405942914, "acc_norm": 0.8121212121212121, "acc_norm_stderr": 0.03050193405942914 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8686868686868687, "acc_stderr": 0.024063156416822516, "acc_norm": 0.8686868686868687, "acc_norm_stderr": 0.024063156416822516 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.021995311963644244, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.021995311963644244 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402534, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402534 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.362962962962963, "acc_stderr": 0.02931820364520686, "acc_norm": 0.362962962962963, "acc_norm_stderr": 0.02931820364520686 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7142857142857143, "acc_stderr": 0.029344572500634332, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.029344572500634332 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 
0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8458715596330275, "acc_stderr": 0.015480826865374308, "acc_norm": 0.8458715596330275, "acc_norm_stderr": 0.015480826865374308 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5740740740740741, "acc_stderr": 0.03372343271653062, "acc_norm": 0.5740740740740741, "acc_norm_stderr": 0.03372343271653062 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8578431372549019, "acc_stderr": 0.02450980392156862, "acc_norm": 0.8578431372549019, "acc_norm_stderr": 0.02450980392156862 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8481012658227848, "acc_stderr": 0.023363878096632446, "acc_norm": 0.8481012658227848, "acc_norm_stderr": 0.023363878096632446 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.672645739910314, "acc_stderr": 0.03149384670994131, "acc_norm": 0.672645739910314, "acc_norm_stderr": 0.03149384670994131 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.03727673575596915, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.03727673575596915 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228733, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.033519538795212696, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.033519538795212696 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.03492606476623791, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.03492606476623791 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.02280138253459753, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.02280138253459753 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8058748403575989, "acc_stderr": 0.014143970276657569, "acc_norm": 0.8058748403575989, "acc_norm_stderr": 0.014143970276657569 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7485549132947977, "acc_stderr": 0.023357365785874037, "acc_norm": 0.7485549132947977, "acc_norm_stderr": 0.023357365785874037 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.394413407821229, "acc_stderr": 0.01634538676210397, "acc_norm": 0.394413407821229, "acc_norm_stderr": 0.01634538676210397 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.761437908496732, "acc_stderr": 0.024404394928087866, "acc_norm": 0.761437908496732, "acc_norm_stderr": 0.024404394928087866 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7266881028938906, "acc_stderr": 0.025311765975426122, "acc_norm": 0.7266881028938906, "acc_norm_stderr": 0.025311765975426122 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7808641975308642, "acc_stderr": 0.02301670564026219, "acc_norm": 0.7808641975308642, "acc_norm_stderr": 0.02301670564026219 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.49645390070921985, "acc_stderr": 0.02982674915328092, "acc_norm": 0.49645390070921985, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4941329856584094, "acc_stderr": 0.012769356925216526, "acc_norm": 0.4941329856584094, "acc_norm_stderr": 0.012769356925216526 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7389705882352942, "acc_stderr": 0.026679252270103128, "acc_norm": 0.7389705882352942, "acc_norm_stderr": 0.026679252270103128 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6781045751633987, "acc_stderr": 0.018901015322093092, "acc_norm": 0.6781045751633987, "acc_norm_stderr": 0.018901015322093092 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.027979823538744546, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.027979823538744546 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.030151134457776334, "acc_norm": 0.9, "acc_norm_stderr": 0.030151134457776334 }, "harness|hendrycksTest-virology|5": { "acc": 0.5843373493975904, "acc_stderr": 0.03836722176598053, "acc_norm": 0.5843373493975904, "acc_norm_stderr": 0.03836722176598053 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03188578017686398, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03188578017686398 }, "harness|truthfulqa:mc|0": { "mc1": 0.572827417380661, "mc1_stderr": 0.017316834410963926, "mc2": 0.7196325985947845, "mc2_stderr": 0.014986891722432193 }, "harness|winogrande|5": { "acc": 0.8342541436464088, "acc_stderr": 0.010450899545370632 }, "harness|gsm8k|5": { "acc": 0.645185746777862, "acc_stderr": 0.013179083387979205 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_kekmodel__StopCarbon-10.7B-v6
[ "region:us" ]
2024-01-04T12:47:43+00:00
{"pretty_name": "Evaluation run of kekmodel/StopCarbon-10.7B-v6", "dataset_summary": "Dataset automatically created during the evaluation run of model [kekmodel/StopCarbon-10.7B-v6](https://huggingface.co/kekmodel/StopCarbon-10.7B-v6) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_kekmodel__StopCarbon-10.7B-v6\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:45:25.846372](https://huggingface.co/datasets/open-llm-leaderboard/details_kekmodel__StopCarbon-10.7B-v6/blob/main/results_2024-01-04T12-45-25.846372.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6667902726267952,\n \"acc_stderr\": 0.03164290381022226,\n \"acc_norm\": 0.6676653511210807,\n \"acc_norm_stderr\": 0.03228679656142636,\n \"mc1\": 0.572827417380661,\n \"mc1_stderr\": 0.017316834410963926,\n \"mc2\": 0.7196325985947845,\n \"mc2_stderr\": 0.014986891722432193\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6843003412969283,\n \"acc_stderr\": 0.013582571095815291,\n \"acc_norm\": 0.71160409556314,\n \"acc_norm_stderr\": 0.013238394422428173\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.714797849034057,\n \"acc_stderr\": 0.00450587908460684,\n \"acc_norm\": 0.8849830711013742,\n \"acc_norm_stderr\": 0.0031839033919416975\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.756578947368421,\n \"acc_stderr\": 0.034923496688842384,\n \"acc_norm\": 0.756578947368421,\n \"acc_norm_stderr\": 0.034923496688842384\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.04461960433384741\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.02863723563980089,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.02863723563980089\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7847222222222222,\n \"acc_stderr\": 0.03437079344106135,\n \"acc_norm\": 0.7847222222222222,\n \"acc_norm_stderr\": 0.03437079344106135\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 
0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956913,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956913\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768077,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768077\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6212765957446809,\n \"acc_stderr\": 0.03170995606040655,\n \"acc_norm\": 0.6212765957446809,\n \"acc_norm_stderr\": 0.03170995606040655\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6344827586206897,\n \"acc_stderr\": 0.040131241954243856,\n \"acc_norm\": 0.6344827586206897,\n \"acc_norm_stderr\": 0.040131241954243856\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.025751310131230234,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.025751310131230234\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8225806451612904,\n \"acc_stderr\": 0.021732540689329286,\n \"acc_norm\": 0.8225806451612904,\n \"acc_norm_stderr\": 0.021732540689329286\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8121212121212121,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.8121212121212121,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8686868686868687,\n \"acc_stderr\": 0.024063156416822516,\n \"acc_norm\": 0.8686868686868687,\n \"acc_norm_stderr\": 0.024063156416822516\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.021995311963644244,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.021995311963644244\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6666666666666666,\n \"acc_stderr\": 0.023901157979402534,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402534\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.362962962962963,\n \"acc_stderr\": 0.02931820364520686,\n \"acc_norm\": 0.362962962962963,\n \"acc_norm_stderr\": 0.02931820364520686\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.029344572500634332,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.029344572500634332\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374308,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374308\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5740740740740741,\n \"acc_stderr\": 0.03372343271653062,\n \"acc_norm\": 0.5740740740740741,\n \"acc_norm_stderr\": 0.03372343271653062\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8578431372549019,\n \"acc_stderr\": 0.02450980392156862,\n \"acc_norm\": 0.8578431372549019,\n \"acc_norm_stderr\": 0.02450980392156862\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8481012658227848,\n \"acc_stderr\": 0.023363878096632446,\n \"acc_norm\": 0.8481012658227848,\n \"acc_norm_stderr\": 0.023363878096632446\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.672645739910314,\n \"acc_stderr\": 0.03149384670994131,\n \"acc_norm\": 0.672645739910314,\n \"acc_norm_stderr\": 0.03149384670994131\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596915,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596915\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.033519538795212696,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.033519538795212696\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.03492606476623791,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.03492606476623791\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.02280138253459753,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.02280138253459753\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8058748403575989,\n \"acc_stderr\": 0.014143970276657569,\n \"acc_norm\": 
0.8058748403575989,\n \"acc_norm_stderr\": 0.014143970276657569\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7485549132947977,\n \"acc_stderr\": 0.023357365785874037,\n \"acc_norm\": 0.7485549132947977,\n \"acc_norm_stderr\": 0.023357365785874037\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.394413407821229,\n \"acc_stderr\": 0.01634538676210397,\n \"acc_norm\": 0.394413407821229,\n \"acc_norm_stderr\": 0.01634538676210397\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.761437908496732,\n \"acc_stderr\": 0.024404394928087866,\n \"acc_norm\": 0.761437908496732,\n \"acc_norm_stderr\": 0.024404394928087866\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7266881028938906,\n \"acc_stderr\": 0.025311765975426122,\n \"acc_norm\": 0.7266881028938906,\n \"acc_norm_stderr\": 0.025311765975426122\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7808641975308642,\n \"acc_stderr\": 0.02301670564026219,\n \"acc_norm\": 0.7808641975308642,\n \"acc_norm_stderr\": 0.02301670564026219\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.49645390070921985,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.49645390070921985,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4941329856584094,\n \"acc_stderr\": 0.012769356925216526,\n \"acc_norm\": 0.4941329856584094,\n \"acc_norm_stderr\": 0.012769356925216526\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7389705882352942,\n \"acc_stderr\": 0.026679252270103128,\n \"acc_norm\": 0.7389705882352942,\n \"acc_norm_stderr\": 0.026679252270103128\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6781045751633987,\n \"acc_stderr\": 0.018901015322093092,\n \"acc_norm\": 0.6781045751633987,\n \"acc_norm_stderr\": 0.018901015322093092\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.027979823538744546,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.027979823538744546\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776334,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776334\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5843373493975904,\n \"acc_stderr\": 0.03836722176598053,\n \"acc_norm\": 0.5843373493975904,\n \"acc_norm_stderr\": 0.03836722176598053\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03188578017686398,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03188578017686398\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.572827417380661,\n \"mc1_stderr\": 0.017316834410963926,\n \"mc2\": 0.7196325985947845,\n \"mc2_stderr\": 0.014986891722432193\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8342541436464088,\n \"acc_stderr\": 0.010450899545370632\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.645185746777862,\n \"acc_stderr\": 0.013179083387979205\n }\n}\n```", "repo_url": 
"https://huggingface.co/kekmodel/StopCarbon-10.7B-v6", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-45-25.846372.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-45-25.846372.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-45-25.846372.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-45-25.846372.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-45-25.846372.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_45_25.846372", "path": ["**/details_harness|winogrande|5_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-45-25.846372.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T12_45_25.846372", "path": ["results_2024-01-04T12-45-25.846372.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T12-45-25.846372.parquet"]}]}]}
2024-01-04T12:48:06+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of kekmodel/StopCarbon-10.7B-v6 Dataset automatically created during the evaluation run of model kekmodel/StopCarbon-10.7B-v6 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:45:25.846372 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
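The loading snippet referenced just above was stripped in this plain-text rendering of the card. A minimal sketch, assuming the details repository follows the leaderboard's usual `open-llm-leaderboard/details_<org>__<model>` naming and exposes the `harness_winogrande_5` configuration used in comparable cards:

```python
from datasets import load_dataset

# Assumed repository id, following the leaderboard's details naming convention.
data = load_dataset(
    "open-llm-leaderboard/details_kekmodel__StopCarbon-10.7B-v6",
    "harness_winogrande_5",
    split="train",
)
print(data)
```

Other task-specific configurations listed in the metadata above can be loaded the same way.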
[ "# Dataset Card for Evaluation run of kekmodel/StopCarbon-10.7B-v6\n\n\n\nDataset automatically created during the evaluation run of model kekmodel/StopCarbon-10.7B-v6 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:45:25.846372(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of kekmodel/StopCarbon-10.7B-v6\n\n\n\nDataset automatically created during the evaluation run of model kekmodel/StopCarbon-10.7B-v6 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:45:25.846372(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of kekmodel/StopCarbon-10.7B-v6\n\n\n\nDataset automatically created during the evaluation run of model kekmodel/StopCarbon-10.7B-v6 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:45:25.846372(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
48d98100c1481c90fb191add59f137d3111e02a8
Inspired by the following paper and dataset: https://huggingface.co/datasets/andersonbcdefg/synthetic_retrieval_tasks/ https://arxiv.org/abs/2401.00368
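The card gives no usage snippet; a minimal, hypothetical loading sketch for this dataset (the split layout is not specified by the card, so it is inspected rather than assumed):

```python
from datasets import load_dataset

# Dataset id taken from the card; available splits are not documented,
# so load the full DatasetDict and inspect it first.
tasks = load_dataset("SebastianBodza/RAG_Aufgaben")
print(tasks)
```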
SebastianBodza/RAG_Aufgaben
[ "arxiv:2401.00368", "region:us" ]
2024-01-04T12:57:04+00:00
{}
2024-01-04T12:58:40+00:00
[ "2401.00368" ]
[]
TAGS #arxiv-2401.00368 #region-us
Inspired by the following paper and dataset: URL URL
[]
[ "TAGS\n#arxiv-2401.00368 #region-us \n" ]
[ 14 ]
[ "passage: TAGS\n#arxiv-2401.00368 #region-us \n" ]
8318387269e2d53d5a4f16b99e260776520edd49
# Ficbook dataset ## Table of Contents - [Table of Contents](#table-of-contents) - [Description](#description) - [Usage](#usage) - [Personal and Sensitive Information](#personal-and-sensitive-information) ## Description **Summary:** Dataset of 344k fan fiction stories from [ficbook.net](https://ficbook.net/). Dataset collection is still in progress. **Script:** [create_ficbook.py](https://github.com/IlyaGusev/rulm/blob/master/data_processing/create_ficbook.py) **Point of Contact:** [Ilya Gusev]([email protected]) **Languages:** Mostly Russian ## Usage Dataset iteration: ```python from datasets import load_dataset dataset = load_dataset('IlyaGusev/ficbook', split="train", streaming=True) for example in dataset: print(example["parts"][0]["clean_text"]) ``` ## Personal and Sensitive Information Information about the original authors is included in the dataset where possible. Many stories from the dataset contain NSFW content.
IlyaGusev/ficbook
[ "task_categories:text-generation", "size_categories:100K<n<1M", "language:ru", "not-for-all-audiences", "roleplay", "region:us" ]
2024-01-04T12:57:58+00:00
{"language": ["ru"], "size_categories": ["100K<n<1M"], "task_categories": ["text-generation"], "pretty_name": "Ficbook Refined", "dataset_info": {"features": [{"name": "url", "dtype": "string"}, {"name": "authors", "sequence": "string"}, {"name": "tags", "sequence": "string"}, {"name": "description", "dtype": "string"}, {"name": "likes", "dtype": "int64"}, {"name": "parts", "list": [{"name": "clean_text", "dtype": "string"}, {"name": "date", "dtype": "string"}, {"name": "text", "dtype": "string"}, {"name": "title", "dtype": "string"}, {"name": "url", "dtype": "string"}]}, {"name": "part_count", "dtype": "int64"}, {"name": "title", "dtype": "string"}, {"name": "rating", "dtype": "string"}, {"name": "status", "dtype": "string"}, {"name": "direction", "dtype": "string"}, {"name": "category", "dtype": "string"}, {"name": "pairing", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 34711888146, "num_examples": 344184}], "download_size": 17809848225, "dataset_size": 34711888146}, "tags": ["not-for-all-audiences", "roleplay"]}
2024-01-18T19:52:02+00:00
[]
[ "ru" ]
TAGS #task_categories-text-generation #size_categories-100K<n<1M #language-Russian #not-for-all-audiences #roleplay #region-us
# Ficbook dataset ## Table of Contents - Table of Contents - Description - Usage - Personal and Sensitive Information ## Description Summary: Dataset of 344k fan fiction stories from URL. Dataset collection is still in progress. Script: create_ficbook.py Point of Contact: Ilya Gusev Languages: Mostly Russian ## Usage Dataset iteration: ## Personal and Sensitive Information Information about the original authors is included in the dataset where possible. Many stories from the dataset contain NSFW content.
[ "# Ficbook dataset", "## Table of Contents\n- Table of Contents\n- Description\n- Usage\n- Personal and Sensitive Information", "## Description\n\nSummary: Dataset of 344k fan fiction stories from URL. Dataset collection is still in progress.\n\nScript: create_ficbook.py\n\nPoint of Contact: Ilya Gusev\n\nLanguages: Mostly Russian", "## Usage\n\nDataset iteration:", "## Personal and Sensitive Information\n\nInformation about the original authors is included in the dataset where possible. Many stories from the dataset contain NSFW content." ]
[ "TAGS\n#task_categories-text-generation #size_categories-100K<n<1M #language-Russian #not-for-all-audiences #roleplay #region-us \n", "# Ficbook dataset", "## Table of Contents\n- Table of Contents\n- Description\n- Usage\n- Personal and Sensitive Information", "## Description\n\nSummary: Dataset of 344k fan fiction stories from URL. Dataset collection is still in progress.\n\nScript: create_ficbook.py\n\nPoint of Contact: Ilya Gusev\n\nLanguages: Mostly Russian", "## Usage\n\nDataset iteration:", "## Personal and Sensitive Information\n\nInformation about the original authors is included in the dataset where possible. Many stories from the dataset contain NSFW content." ]
[ 47, 6, 22, 47, 9, 33 ]
[ "passage: TAGS\n#task_categories-text-generation #size_categories-100K<n<1M #language-Russian #not-for-all-audiences #roleplay #region-us \n# Ficbook dataset## Table of Contents\n- Table of Contents\n- Description\n- Usage\n- Personal and Sensitive Information## Description\n\nSummary: Dataset of 344k fan fiction stories from URL. Dataset collection is still in progress.\n\nScript: create_ficbook.py\n\nPoint of Contact: Ilya Gusev\n\nLanguages: Mostly Russian## Usage\n\nDataset iteration:## Personal and Sensitive Information\n\nInformation about the original authors is included in the dataset where possible. Many stories from the dataset contain NSFW content." ]
a2bdec6eabfe1f865b38a2a3219fd38a06c34708
# Dataset Card for Evaluation run of scaledown/ScaleDown-7B-slerp-v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [scaledown/ScaleDown-7B-slerp-v0.1](https://huggingface.co/scaledown/ScaleDown-7B-slerp-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_scaledown__ScaleDown-7B-slerp-v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T12:55:50.011643](https://huggingface.co/datasets/open-llm-leaderboard/details_scaledown__ScaleDown-7B-slerp-v0.1/blob/main/results_2024-01-04T12-55-50.011643.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6556854225506745, "acc_stderr": 0.03194313996110711, "acc_norm": 0.6565457814818652, "acc_norm_stderr": 0.03259529584470868, "mc1": 0.4467564259485924, "mc1_stderr": 0.017403977522557144, "mc2": 0.6190313414514481, "mc2_stderr": 0.01508778013321091 }, "harness|arc:challenge|25": { "acc": 0.6501706484641638, "acc_stderr": 0.013936809212158287, "acc_norm": 0.6800341296928327, "acc_norm_stderr": 0.013631345807016195 }, "harness|hellaswag|10": { "acc": 0.6620195180242979, "acc_stderr": 0.0047205513235471265, "acc_norm": 0.8570005974905397, "acc_norm_stderr": 0.003493567914093289 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6370370370370371, "acc_stderr": 0.04153948404742398, "acc_norm": 0.6370370370370371, "acc_norm_stderr": 0.04153948404742398 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7039473684210527, "acc_stderr": 0.03715062154998904, "acc_norm": 0.7039473684210527, "acc_norm_stderr": 0.03715062154998904 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7094339622641509, "acc_stderr": 0.02794321998933713, "acc_norm": 0.7094339622641509, "acc_norm_stderr": 0.02794321998933713 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, 
"acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.79, "acc_stderr": 0.04093601807403326, "acc_norm": 0.79, "acc_norm_stderr": 0.04093601807403326 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6170212765957447, "acc_stderr": 0.03177821250236922, "acc_norm": 0.6170212765957447, "acc_norm_stderr": 0.03177821250236922 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482757, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482757 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41005291005291006, "acc_stderr": 0.025331202438944433, "acc_norm": 0.41005291005291006, "acc_norm_stderr": 0.025331202438944433 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7935483870967742, "acc_stderr": 0.023025899617188716, "acc_norm": 0.7935483870967742, "acc_norm_stderr": 0.023025899617188716 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.0328766675860349, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.0328766675860349 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463362, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463362 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8860103626943006, "acc_stderr": 0.022935144053919443, "acc_norm": 0.8860103626943006, "acc_norm_stderr": 0.022935144053919443 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6820512820512821, "acc_stderr": 0.023610884308927865, "acc_norm": 0.6820512820512821, "acc_norm_stderr": 0.023610884308927865 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.36666666666666664, "acc_stderr": 0.029381620726465066, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.029381620726465066 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7058823529411765, "acc_stderr": 0.029597329730978082, "acc_norm": 0.7058823529411765, "acc_norm_stderr": 0.029597329730978082 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.3509933774834437, "acc_stderr": 0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8623853211009175, "acc_stderr": 0.014770105878649395, "acc_norm": 0.8623853211009175, "acc_norm_stderr": 0.014770105878649395 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5370370370370371, "acc_stderr": 0.03400603625538271, "acc_norm": 0.5370370370370371, "acc_norm_stderr": 0.03400603625538271 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8235294117647058, "acc_stderr": 0.026756401538078966, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.026756401538078966 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.02553010046023349, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.02553010046023349 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159465, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159465 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228733, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8148148148148148, "acc_stderr": 0.03755265865037182, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.03755265865037182 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7914110429447853, "acc_stderr": 0.03192193448934724, "acc_norm": 0.7914110429447853, "acc_norm_stderr": 0.03192193448934724 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.047268355537191, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.047268355537191 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.0446196043338474, "acc_norm": 0.73, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8403575989782887, "acc_stderr": 0.013097934513263004, "acc_norm": 0.8403575989782887, "acc_norm_stderr": 0.013097934513263004 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4212290502793296, "acc_stderr": 0.01651367603117959, "acc_norm": 0.4212290502793296, "acc_norm_stderr": 0.01651367603117959 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.738562091503268, "acc_stderr": 0.025160998214292456, "acc_norm": 0.738562091503268, "acc_norm_stderr": 0.025160998214292456 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7041800643086816, "acc_stderr": 0.025922371788818763, "acc_norm": 0.7041800643086816, "acc_norm_stderr": 0.025922371788818763 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600712992, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600712992 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.48936170212765956, "acc_stderr": 0.029820747191422473, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.029820747191422473 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4661016949152542, "acc_stderr": 0.01274085387294983, "acc_norm": 0.4661016949152542, "acc_norm_stderr": 0.01274085387294983 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6948529411764706, "acc_stderr": 0.027971541370170598, "acc_norm": 0.6948529411764706, "acc_norm_stderr": 0.027971541370170598 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6813725490196079, "acc_stderr": 0.01885008469646872, "acc_norm": 0.6813725490196079, "acc_norm_stderr": 0.01885008469646872 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8756218905472637, "acc_stderr": 0.023335401790166327, "acc_norm": 0.8756218905472637, "acc_norm_stderr": 0.023335401790166327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.038823108508905954, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.038823108508905954 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8245614035087719, "acc_stderr": 0.029170885500727665, "acc_norm": 0.8245614035087719, "acc_norm_stderr": 0.029170885500727665 }, "harness|truthfulqa:mc|0": { "mc1": 0.4467564259485924, "mc1_stderr": 0.017403977522557144, "mc2": 0.6190313414514481, "mc2_stderr": 0.01508778013321091 }, "harness|winogrande|5": { "acc": 0.813733228097869, "acc_stderr": 0.01094187795567621 }, "harness|gsm8k|5": { "acc": 0.6717210007581501, "acc_stderr": 0.012934758019449618 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_scaledown__ScaleDown-7B-slerp-v0.1
[ "region:us" ]
2024-01-04T12:58:09+00:00
{"pretty_name": "Evaluation run of scaledown/ScaleDown-7B-slerp-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [scaledown/ScaleDown-7B-slerp-v0.1](https://huggingface.co/scaledown/ScaleDown-7B-slerp-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_scaledown__ScaleDown-7B-slerp-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T12:55:50.011643](https://huggingface.co/datasets/open-llm-leaderboard/details_scaledown__ScaleDown-7B-slerp-v0.1/blob/main/results_2024-01-04T12-55-50.011643.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6556854225506745,\n \"acc_stderr\": 0.03194313996110711,\n \"acc_norm\": 0.6565457814818652,\n \"acc_norm_stderr\": 0.03259529584470868,\n \"mc1\": 0.4467564259485924,\n \"mc1_stderr\": 0.017403977522557144,\n \"mc2\": 0.6190313414514481,\n \"mc2_stderr\": 0.01508778013321091\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6501706484641638,\n \"acc_stderr\": 0.013936809212158287,\n \"acc_norm\": 0.6800341296928327,\n \"acc_norm_stderr\": 0.013631345807016195\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6620195180242979,\n \"acc_stderr\": 0.0047205513235471265,\n \"acc_norm\": 0.8570005974905397,\n \"acc_norm_stderr\": 0.003493567914093289\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6370370370370371,\n \"acc_stderr\": 0.04153948404742398,\n \"acc_norm\": 0.6370370370370371,\n \"acc_norm_stderr\": 0.04153948404742398\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7039473684210527,\n \"acc_stderr\": 0.03715062154998904,\n \"acc_norm\": 0.7039473684210527,\n \"acc_norm_stderr\": 0.03715062154998904\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7094339622641509,\n \"acc_stderr\": 0.02794321998933713,\n \"acc_norm\": 0.7094339622641509,\n \"acc_norm_stderr\": 0.02794321998933713\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.79,\n \"acc_stderr\": 0.04093601807403326,\n \"acc_norm\": 0.79,\n \"acc_norm_stderr\": 0.04093601807403326\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6170212765957447,\n \"acc_stderr\": 0.03177821250236922,\n \"acc_norm\": 0.6170212765957447,\n \"acc_norm_stderr\": 0.03177821250236922\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482757,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482757\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41005291005291006,\n \"acc_stderr\": 0.025331202438944433,\n \"acc_norm\": 0.41005291005291006,\n \"acc_norm_stderr\": 0.025331202438944433\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7935483870967742,\n \"acc_stderr\": 0.023025899617188716,\n \"acc_norm\": 0.7935483870967742,\n \"acc_norm_stderr\": 0.023025899617188716\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.022935144053919443,\n \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919443\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6820512820512821,\n \"acc_stderr\": 0.023610884308927865,\n \"acc_norm\": 0.6820512820512821,\n \"acc_norm_stderr\": 0.023610884308927865\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.36666666666666664,\n \"acc_stderr\": 0.029381620726465066,\n \"acc_norm\": 0.36666666666666664,\n \"acc_norm_stderr\": 0.029381620726465066\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7058823529411765,\n \"acc_stderr\": 0.029597329730978082,\n \"acc_norm\": 0.7058823529411765,\n \"acc_norm_stderr\": 0.029597329730978082\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8623853211009175,\n \"acc_stderr\": 0.014770105878649395,\n \"acc_norm\": 0.8623853211009175,\n \"acc_norm_stderr\": 0.014770105878649395\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5370370370370371,\n \"acc_stderr\": 0.03400603625538271,\n \"acc_norm\": 0.5370370370370371,\n \"acc_norm_stderr\": 0.03400603625538271\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.026756401538078966,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.026756401538078966\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.02553010046023349,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.02553010046023349\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159465,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159465\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8148148148148148,\n \"acc_stderr\": 0.03755265865037182,\n \"acc_norm\": 0.8148148148148148,\n \"acc_norm_stderr\": 0.03755265865037182\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7914110429447853,\n \"acc_stderr\": 0.03192193448934724,\n \"acc_norm\": 0.7914110429447853,\n \"acc_norm_stderr\": 0.03192193448934724\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8403575989782887,\n \"acc_stderr\": 0.013097934513263004,\n \"acc_norm\": 0.8403575989782887,\n \"acc_norm_stderr\": 0.013097934513263004\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4212290502793296,\n \"acc_stderr\": 0.01651367603117959,\n \"acc_norm\": 0.4212290502793296,\n \"acc_norm_stderr\": 0.01651367603117959\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.738562091503268,\n \"acc_stderr\": 0.025160998214292456,\n \"acc_norm\": 0.738562091503268,\n \"acc_norm_stderr\": 0.025160998214292456\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n \"acc_stderr\": 0.025922371788818763,\n \"acc_norm\": 0.7041800643086816,\n \"acc_norm_stderr\": 0.025922371788818763\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600712992,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600712992\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.029820747191422473,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.029820747191422473\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4661016949152542,\n \"acc_stderr\": 0.01274085387294983,\n \"acc_norm\": 0.4661016949152542,\n \"acc_norm_stderr\": 0.01274085387294983\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6948529411764706,\n \"acc_stderr\": 0.027971541370170598,\n \"acc_norm\": 0.6948529411764706,\n \"acc_norm_stderr\": 0.027971541370170598\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6813725490196079,\n \"acc_stderr\": 0.01885008469646872,\n \"acc_norm\": 0.6813725490196079,\n \"acc_norm_stderr\": 0.01885008469646872\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8756218905472637,\n \"acc_stderr\": 0.023335401790166327,\n \"acc_norm\": 0.8756218905472637,\n \"acc_norm_stderr\": 0.023335401790166327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.038823108508905954,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.038823108508905954\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8245614035087719,\n \"acc_stderr\": 0.029170885500727665,\n \"acc_norm\": 0.8245614035087719,\n \"acc_norm_stderr\": 0.029170885500727665\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4467564259485924,\n \"mc1_stderr\": 0.017403977522557144,\n \"mc2\": 0.6190313414514481,\n \"mc2_stderr\": 0.01508778013321091\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.813733228097869,\n \"acc_stderr\": 0.01094187795567621\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6717210007581501,\n \"acc_stderr\": 0.012934758019449618\n 
}\n}\n```", "repo_url": "https://huggingface.co/scaledown/ScaleDown-7B-slerp-v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-55-50.011643.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-55-50.011643.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-55-50.011643.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T12-55-50.011643.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-55-50.011643.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T12_55_50.011643", "path": ["**/details_harness|winogrande|5_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T12-55-50.011643.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T12_55_50.011643", "path": ["results_2024-01-04T12-55-50.011643.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T12-55-50.011643.parquet"]}]}]}
2024-01-04T12:58:33+00:00
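Each harness config in the metadata above exposes both a timestamped split and a `latest` alias. As a minimal sketch of how that layout can be explored with the `datasets` library — the repository id is an assumption inferred from the parquet paths, not stated verbatim in this record:

```python
from datasets import get_dataset_config_names, get_dataset_split_names, load_dataset

# Assumed repository id, inferred from the parquet paths above; adjust if the repo differs.
REPO = "open-llm-leaderboard/details_scaledown__ScaleDown-7B-slerp-v0.1"

# Enumerate the configs ("harness_arc_challenge_25", ..., "results").
configs = get_dataset_config_names(REPO)
print(len(configs), configs[:3])

# Every config exposes a timestamped split plus a "latest" alias for the newest run.
print(get_dataset_split_names(REPO, "results"))

# Loading "latest" therefore always returns the most recent evaluation's rows.
latest_results = load_dataset(REPO, "results", split="latest")
```

Pointing scripts at `latest` rather than a timestamped split keeps them working if a newer evaluation run is later appended to the repository.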
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of scaledown/ScaleDown-7B-slerp-v0.1 Dataset automatically created during the evaluation run of model scaledown/ScaleDown-7B-slerp-v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T12:55:50.011643 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and in the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
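The snippet announced by "you can for instance do the following" is not present in this processed text; a hedged reconstruction follows, with the repository id and config name assumed from the configs listed in the metadata above rather than quoted from the card:

```python
from datasets import load_dataset

# Repository id and config name are assumptions based on the metadata above,
# not quoted from this card.
data = load_dataset(
    "open-llm-leaderboard/details_scaledown__ScaleDown-7B-slerp-v0.1",
    "harness_winogrande_5",
    split="train",
)
print(data)
```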
[ "# Dataset Card for Evaluation run of scaledown/ScaleDown-7B-slerp-v0.1\n\n\n\nDataset automatically created during the evaluation run of model scaledown/ScaleDown-7B-slerp-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:55:50.011643(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of scaledown/ScaleDown-7B-slerp-v0.1\n\n\n\nDataset automatically created during the evaluation run of model scaledown/ScaleDown-7B-slerp-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T12:55:50.011643(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 191, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of scaledown/ScaleDown-7B-slerp-v0.1\n\n\n\nDataset automatically created during the evaluation run of model scaledown/ScaleDown-7B-slerp-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T12:55:50.011643(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
8833dc2b5eaabe84cb6c6ea74c5eab5ddb1e803d
# Dataset Card for Evaluation run of mlabonne/Beyonder-4x7B-v2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [mlabonne/Beyonder-4x7B-v2](https://huggingface.co/mlabonne/Beyonder-4x7B-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_mlabonne__Beyonder-4x7B-v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:00:16.346263](https://huggingface.co/datasets/open-llm-leaderboard/details_mlabonne__Beyonder-4x7B-v2/blob/main/results_2024-01-04T13-00-16.346263.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and in the "latest" split for each eval): ```python { "all": { "acc": 0.6557407580878285, "acc_stderr": 0.031986495815639754, "acc_norm": 0.6553471404895377, "acc_norm_stderr": 0.03264904081955929, "mc1": 0.44430844553243576, "mc1_stderr": 0.017394586250743173, "mc2": 0.606846132898595, "mc2_stderr": 0.015656381105660862 }, "harness|arc:challenge|25": { "acc": 0.6680887372013652, "acc_stderr": 0.013760988200880541, "acc_norm": 0.6877133105802048, "acc_norm_stderr": 0.013542598541688065 }, "harness|hellaswag|10": { "acc": 0.6960764787890859, "acc_stderr": 0.004590100050198816, "acc_norm": 0.8679545907189803, "acc_norm_stderr": 0.0033784824887488746 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6592592592592592, "acc_stderr": 0.04094376269996792, "acc_norm": 0.6592592592592592, "acc_norm_stderr": 0.04094376269996792 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6710526315789473, "acc_stderr": 0.03823428969926605, "acc_norm": 0.6710526315789473, "acc_norm_stderr": 0.03823428969926605 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7245283018867924, "acc_stderr": 0.027495663683724057, "acc_norm": 0.7245283018867924, "acc_norm_stderr": 0.027495663683724057 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7986111111111112, "acc_stderr": 0.03353647469713839, "acc_norm": 0.7986111111111112, "acc_norm_stderr": 0.03353647469713839 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.53, "acc_stderr": 0.05016135580465919, "acc_norm": 0.53, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.04725815626252604, "acc_norm": 0.33, "acc_norm_stderr": 0.04725815626252604 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.653179190751445, "acc_stderr": 0.036291466701596636, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.036291466701596636 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.43137254901960786, "acc_stderr": 0.04928099597287533, "acc_norm": 0.43137254901960786, "acc_norm_stderr": 0.04928099597287533 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5617021276595745, "acc_stderr": 0.03243618636108102, "acc_norm": 0.5617021276595745, "acc_norm_stderr": 0.03243618636108102 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370332, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370332 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41798941798941797, "acc_stderr": 0.02540255550326091, "acc_norm": 0.41798941798941797, "acc_norm_stderr": 0.02540255550326091 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677172, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677172 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7967741935483871, "acc_stderr": 0.02289168798455496, "acc_norm": 0.7967741935483871, "acc_norm_stderr": 0.02289168798455496 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5221674876847291, "acc_stderr": 0.03514528562175007, "acc_norm": 0.5221674876847291, "acc_norm_stderr": 0.03514528562175007 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.803030303030303, "acc_stderr": 0.028335609732463362, "acc_norm": 0.803030303030303, "acc_norm_stderr": 0.028335609732463362 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603348, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603348 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6743589743589744, "acc_stderr": 0.02375966576741229, "acc_norm": 0.6743589743589744, "acc_norm_stderr": 0.02375966576741229 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.02874204090394848, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.02874204090394848 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7100840336134454, "acc_stderr": 0.029472485833136077, "acc_norm": 0.7100840336134454, "acc_norm_stderr": 0.029472485833136077 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.03822746937658752, 
"acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.03822746937658752 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8513761467889909, "acc_stderr": 0.015251253773660836, "acc_norm": 0.8513761467889909, "acc_norm_stderr": 0.015251253773660836 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5092592592592593, "acc_stderr": 0.034093869469927006, "acc_norm": 0.5092592592592593, "acc_norm_stderr": 0.034093869469927006 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.025524722324553353, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.025524722324553353 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.025530100460233497, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.025530100460233497 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.03641297081313729, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.03641297081313729 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990947, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990947 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.040191074725573483, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.033519538795212696, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.033519538795212696 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.047268355537191, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.047268355537191 }, "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.039891398595317706, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.039891398595317706 }, "harness|hendrycksTest-marketing|5": { "acc": 0.9017094017094017, "acc_stderr": 0.019503444900757567, "acc_norm": 0.9017094017094017, "acc_norm_stderr": 0.019503444900757567 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8263090676883781, "acc_stderr": 0.01354741565866226, "acc_norm": 0.8263090676883781, "acc_norm_stderr": 0.01354741565866226 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069363, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069363 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4424581005586592, "acc_stderr": 0.016611393687268577, "acc_norm": 0.4424581005586592, "acc_norm_stderr": 0.016611393687268577 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.025646863097137894, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.025646863097137894 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.729903536977492, "acc_stderr": 0.025218040373410633, "acc_norm": 0.729903536977492, "acc_norm_stderr": 0.025218040373410633 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7469135802469136, "acc_stderr": 0.024191808600713, "acc_norm": 0.7469135802469136, "acc_norm_stderr": 0.024191808600713 }, "harness|hendrycksTest-professional_accounting|5": { 
"acc": 0.5035460992907801, "acc_stderr": 0.02982674915328092, "acc_norm": 0.5035460992907801, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4641460234680574, "acc_stderr": 0.012737361318730583, "acc_norm": 0.4641460234680574, "acc_norm_stderr": 0.012737361318730583 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6838235294117647, "acc_stderr": 0.02824568739146292, "acc_norm": 0.6838235294117647, "acc_norm_stderr": 0.02824568739146292 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6617647058823529, "acc_stderr": 0.01913994374848704, "acc_norm": 0.6617647058823529, "acc_norm_stderr": 0.01913994374848704 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7183673469387755, "acc_stderr": 0.028795185574291296, "acc_norm": 0.7183673469387755, "acc_norm_stderr": 0.028795185574291296 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.44430844553243576, "mc1_stderr": 0.017394586250743173, "mc2": 0.606846132898595, "mc2_stderr": 0.015656381105660862 }, "harness|winogrande|5": { "acc": 0.8089976322020521, "acc_stderr": 0.011047808761510423 }, "harness|gsm8k|5": { "acc": 0.7172100075815011, "acc_stderr": 0.012405020417873619 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
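The card above also describes an aggregate "results" configuration alongside the per-task ones. Below is a minimal, hedged sketch of loading that configuration with the same `load_dataset` call used in the card's own snippet; the repository id and the config/split names ("results", "latest") are taken from the card, while the exact column layout of the results table is not documented here, so the example only inspects what comes back.

```python
from datasets import load_dataset

# Aggregated metrics for the run live in the "results" configuration;
# the "latest" split is an alias for the most recent timestamped file
# (names taken from the card above; schema not verified here).
results = load_dataset(
    "open-llm-leaderboard/details_mlabonne__Beyonder-4x7B-v2",
    "results",
    split="latest",
)

# Inspect whatever columns the aggregate table exposes.
print(results.column_names)
print(results[0])
```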
open-llm-leaderboard/details_shadowml__Beyonder-4x7B-v2
[ "region:us" ]
2024-01-04T13:02:38+00:00
{"pretty_name": "Evaluation run of mlabonne/Beyonder-4x7B-v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [mlabonne/Beyonder-4x7B-v2](https://huggingface.co/mlabonne/Beyonder-4x7B-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_mlabonne__Beyonder-4x7B-v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:00:16.346263](https://huggingface.co/datasets/open-llm-leaderboard/details_mlabonne__Beyonder-4x7B-v2/blob/main/results_2024-01-04T13-00-16.346263.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6557407580878285,\n \"acc_stderr\": 0.031986495815639754,\n \"acc_norm\": 0.6553471404895377,\n \"acc_norm_stderr\": 0.03264904081955929,\n \"mc1\": 0.44430844553243576,\n \"mc1_stderr\": 0.017394586250743173,\n \"mc2\": 0.606846132898595,\n \"mc2_stderr\": 0.015656381105660862\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6680887372013652,\n \"acc_stderr\": 0.013760988200880541,\n \"acc_norm\": 0.6877133105802048,\n \"acc_norm_stderr\": 0.013542598541688065\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6960764787890859,\n \"acc_stderr\": 0.004590100050198816,\n \"acc_norm\": 0.8679545907189803,\n \"acc_norm_stderr\": 0.0033784824887488746\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6592592592592592,\n \"acc_stderr\": 0.04094376269996792,\n \"acc_norm\": 0.6592592592592592,\n \"acc_norm_stderr\": 0.04094376269996792\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6710526315789473,\n \"acc_stderr\": 0.03823428969926605,\n \"acc_norm\": 0.6710526315789473,\n \"acc_norm_stderr\": 0.03823428969926605\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7245283018867924,\n \"acc_stderr\": 0.027495663683724057,\n \"acc_norm\": 0.7245283018867924,\n \"acc_norm_stderr\": 0.027495663683724057\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7986111111111112,\n \"acc_stderr\": 0.03353647469713839,\n \"acc_norm\": 0.7986111111111112,\n \"acc_norm_stderr\": 0.03353647469713839\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.53,\n \"acc_stderr\": 
0.05016135580465919,\n \"acc_norm\": 0.53,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.04725815626252604,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.04725815626252604\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.036291466701596636,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.036291466701596636\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.43137254901960786,\n \"acc_stderr\": 0.04928099597287533,\n \"acc_norm\": 0.43137254901960786,\n \"acc_norm_stderr\": 0.04928099597287533\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5617021276595745,\n \"acc_stderr\": 0.03243618636108102,\n \"acc_norm\": 0.5617021276595745,\n \"acc_norm_stderr\": 0.03243618636108102\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370332,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370332\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41798941798941797,\n \"acc_stderr\": 0.02540255550326091,\n \"acc_norm\": 0.41798941798941797,\n \"acc_norm_stderr\": 0.02540255550326091\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677172,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677172\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7967741935483871,\n \"acc_stderr\": 0.02289168798455496,\n \"acc_norm\": 0.7967741935483871,\n \"acc_norm_stderr\": 0.02289168798455496\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5221674876847291,\n \"acc_stderr\": 0.03514528562175007,\n \"acc_norm\": 0.5221674876847291,\n \"acc_norm_stderr\": 0.03514528562175007\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.803030303030303,\n \"acc_stderr\": 0.028335609732463362,\n \"acc_norm\": 0.803030303030303,\n \"acc_norm_stderr\": 0.028335609732463362\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603348,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603348\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": 
{\n \"acc\": 0.6743589743589744,\n \"acc_stderr\": 0.02375966576741229,\n \"acc_norm\": 0.6743589743589744,\n \"acc_norm_stderr\": 0.02375966576741229\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.02874204090394848,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.02874204090394848\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7100840336134454,\n \"acc_stderr\": 0.029472485833136077,\n \"acc_norm\": 0.7100840336134454,\n \"acc_norm_stderr\": 0.029472485833136077\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.32450331125827814,\n \"acc_stderr\": 0.03822746937658752,\n \"acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.03822746937658752\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8513761467889909,\n \"acc_stderr\": 0.015251253773660836,\n \"acc_norm\": 0.8513761467889909,\n \"acc_norm_stderr\": 0.015251253773660836\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5092592592592593,\n \"acc_stderr\": 0.034093869469927006,\n \"acc_norm\": 0.5092592592592593,\n \"acc_norm_stderr\": 0.034093869469927006\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.025524722324553353,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.025524722324553353\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.025530100460233497,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.025530100460233497\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.03641297081313729,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.03641297081313729\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990947,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990947\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.033519538795212696,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.033519538795212696\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.047268355537191,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.047268355537191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.9017094017094017,\n \"acc_stderr\": 0.019503444900757567,\n \"acc_norm\": 0.9017094017094017,\n \"acc_norm_stderr\": 0.019503444900757567\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8263090676883781,\n \"acc_stderr\": 0.01354741565866226,\n 
\"acc_norm\": 0.8263090676883781,\n \"acc_norm_stderr\": 0.01354741565866226\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069363,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069363\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4424581005586592,\n \"acc_stderr\": 0.016611393687268577,\n \"acc_norm\": 0.4424581005586592,\n \"acc_norm_stderr\": 0.016611393687268577\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137894,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137894\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.729903536977492,\n \"acc_stderr\": 0.025218040373410633,\n \"acc_norm\": 0.729903536977492,\n \"acc_norm_stderr\": 0.025218040373410633\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7469135802469136,\n \"acc_stderr\": 0.024191808600713,\n \"acc_norm\": 0.7469135802469136,\n \"acc_norm_stderr\": 0.024191808600713\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5035460992907801,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.5035460992907801,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4641460234680574,\n \"acc_stderr\": 0.012737361318730583,\n \"acc_norm\": 0.4641460234680574,\n \"acc_norm_stderr\": 0.012737361318730583\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6838235294117647,\n \"acc_stderr\": 0.02824568739146292,\n \"acc_norm\": 0.6838235294117647,\n \"acc_norm_stderr\": 0.02824568739146292\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6617647058823529,\n \"acc_stderr\": 0.01913994374848704,\n \"acc_norm\": 0.6617647058823529,\n \"acc_norm_stderr\": 0.01913994374848704\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7183673469387755,\n \"acc_stderr\": 0.028795185574291296,\n \"acc_norm\": 0.7183673469387755,\n \"acc_norm_stderr\": 0.028795185574291296\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.44430844553243576,\n \"mc1_stderr\": 0.017394586250743173,\n \"mc2\": 0.606846132898595,\n \"mc2_stderr\": 0.015656381105660862\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8089976322020521,\n \"acc_stderr\": 0.011047808761510423\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7172100075815011,\n \"acc_stderr\": 0.012405020417873619\n }\n}\n```", "repo_url": 
"https://huggingface.co/mlabonne/Beyonder-4x7B-v2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-00-16.346263.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-00-16.346263.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-00-16.346263.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-00-16.346263.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-00-16.346263.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_00_16.346263", "path": ["**/details_harness|winogrande|5_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T13-00-16.346263.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T13_00_16.346263", "path": ["results_2024-01-04T13-00-16.346263.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T13-00-16.346263.parquet"]}]}]}
2024-01-08T08:22:35+00:00
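The metadata above declares one configuration per evaluated task, each with a timestamped split plus a "latest" alias. The following is a sketch, under the assumption that the repository is reachable on the Hub under the id used in the card's snippet, of discovering those configurations programmatically and loading one of them; the `harness_gsm8k_5` config name is taken directly from the metadata.

```python
from datasets import get_dataset_config_names, load_dataset

REPO = "open-llm-leaderboard/details_mlabonne__Beyonder-4x7B-v2"

# Enumerate the 63 configurations listed in the metadata
# (one per task, plus the aggregate "results" configuration).
configs = get_dataset_config_names(REPO)
print(len(configs), configs[:5])

# Load the per-example details of a single task; "latest" aliases
# the most recent timestamped split.
gsm8k = load_dataset(REPO, "harness_gsm8k_5", split="latest")
print(gsm8k)
```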
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of mlabonne/Beyonder-4x7B-v2 Dataset automatically created during the evaluation run of model mlabonne/Beyonder-4x7B-v2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T13:00:16.346263 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of mlabonne/Beyonder-4x7B-v2\n\n\n\nDataset automatically created during the evaluation run of model mlabonne/Beyonder-4x7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:00:16.346263(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of mlabonne/Beyonder-4x7B-v2\n\n\n\nDataset automatically created during the evaluation run of model mlabonne/Beyonder-4x7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:00:16.346263(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of mlabonne/Beyonder-4x7B-v2\n\n\n\nDataset automatically created during the evaluation run of model mlabonne/Beyonder-4x7B-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:00:16.346263(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
ba3f47dd18104ef770f5d7563b445e563fab9eb7
# Dataset Card for Evaluation run of Josephgflowers/TinyLlama-3T-Cinder-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Josephgflowers/TinyLlama-3T-Cinder-v1](https://huggingface.co/Josephgflowers/TinyLlama-3T-Cinder-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Josephgflowers__TinyLlama-3T-Cinder-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:03:09.037379](https://huggingface.co/datasets/open-llm-leaderboard/details_Josephgflowers__TinyLlama-3T-Cinder-v1/blob/main/results_2024-01-04T13-03-09.037379.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.26371678954522415, "acc_stderr": 0.031150234725395142, "acc_norm": 0.2650207569978929, "acc_norm_stderr": 0.03197810250604355, "mc1": 0.21909424724602203, "mc1_stderr": 0.014480038578757442, "mc2": 0.3832145699748184, "mc2_stderr": 0.015352373020404608 }, "harness|arc:challenge|25": { "acc": 0.3054607508532423, "acc_stderr": 0.013460080478002505, "acc_norm": 0.33532423208191126, "acc_norm_stderr": 0.013796182947785568 }, "harness|hellaswag|10": { "acc": 0.37801234813782114, "acc_stderr": 0.004838997427699743, "acc_norm": 0.46355307707627963, "acc_norm_stderr": 0.004976507121076259 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2518518518518518, "acc_stderr": 0.03749850709174022, "acc_norm": 0.2518518518518518, "acc_norm_stderr": 0.03749850709174022 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.2565789473684211, "acc_stderr": 0.0355418036802569, "acc_norm": 0.2565789473684211, "acc_norm_stderr": 0.0355418036802569 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.26037735849056604, "acc_stderr": 0.02700876609070809, "acc_norm": 0.26037735849056604, "acc_norm_stderr": 0.02700876609070809 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2777777777777778, "acc_stderr": 0.03745554791462457, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.03745554791462457 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.22, "acc_stderr": 0.041633319989322695, "acc_norm": 0.22, "acc_norm_stderr": 0.041633319989322695 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, 
"acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.1676300578034682, "acc_stderr": 0.028481963032143395, "acc_norm": 0.1676300578034682, "acc_norm_stderr": 0.028481963032143395 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171453, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171453 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2765957446808511, "acc_stderr": 0.0292418838696288, "acc_norm": 0.2765957446808511, "acc_norm_stderr": 0.0292418838696288 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2982456140350877, "acc_stderr": 0.04303684033537315, "acc_norm": 0.2982456140350877, "acc_norm_stderr": 0.04303684033537315 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2689655172413793, "acc_stderr": 0.036951833116502325, "acc_norm": 0.2689655172413793, "acc_norm_stderr": 0.036951833116502325 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.023517294335963286, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.023517294335963286 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.2619047619047619, "acc_stderr": 0.03932537680392869, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.03932537680392869 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.2903225806451613, "acc_stderr": 0.02582210611941589, "acc_norm": 0.2903225806451613, "acc_norm_stderr": 0.02582210611941589 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.32019704433497537, "acc_stderr": 0.032826493853041504, "acc_norm": 0.32019704433497537, "acc_norm_stderr": 0.032826493853041504 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.21, "acc_stderr": 0.040936018074033256, "acc_norm": 0.21, "acc_norm_stderr": 0.040936018074033256 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.26666666666666666, "acc_stderr": 0.03453131801885416, "acc_norm": 0.26666666666666666, "acc_norm_stderr": 0.03453131801885416 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.31313131313131315, "acc_stderr": 0.03304205087813652, "acc_norm": 0.31313131313131315, "acc_norm_stderr": 0.03304205087813652 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.2694300518134715, "acc_stderr": 0.03201867122877793, "acc_norm": 0.2694300518134715, "acc_norm_stderr": 0.03201867122877793 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.21794871794871795, "acc_stderr": 0.02093244577446319, "acc_norm": 0.21794871794871795, "acc_norm_stderr": 0.02093244577446319 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.23703703703703705, "acc_stderr": 0.025928876132766118, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.025928876132766118 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.027553614467863797, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.027553614467863797 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.24503311258278146, "acc_stderr": 0.03511807571804723, "acc_norm": 0.24503311258278146, "acc_norm_stderr": 0.03511807571804723 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.23302752293577983, "acc_stderr": 0.018125669180861493, "acc_norm": 0.23302752293577983, "acc_norm_stderr": 0.018125669180861493 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.03167468706828979, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.03167468706828979 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.23529411764705882, "acc_stderr": 0.02977177522814563, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.02977177522814563 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.24472573839662448, "acc_stderr": 0.027985699387036423, "acc_norm": 0.24472573839662448, "acc_norm_stderr": 0.027985699387036423 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.3094170403587444, "acc_stderr": 0.031024411740572213, "acc_norm": 0.3094170403587444, "acc_norm_stderr": 0.031024411740572213 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.26717557251908397, "acc_stderr": 0.03880848301082396, "acc_norm": 0.26717557251908397, "acc_norm_stderr": 0.03880848301082396 }, "harness|hendrycksTest-international_law|5": { "acc": 0.30578512396694213, "acc_stderr": 0.04205953933884124, "acc_norm": 0.30578512396694213, "acc_norm_stderr": 0.04205953933884124 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.24074074074074073, "acc_stderr": 0.04133119440243839, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.04133119440243839 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3067484662576687, "acc_stderr": 0.036230899157241474, "acc_norm": 0.3067484662576687, "acc_norm_stderr": 0.036230899157241474 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.20535714285714285, "acc_stderr": 0.03834241021419072, "acc_norm": 0.20535714285714285, "acc_norm_stderr": 0.03834241021419072 }, "harness|hendrycksTest-management|5": { "acc": 0.2912621359223301, "acc_stderr": 0.044986763205729224, "acc_norm": 0.2912621359223301, "acc_norm_stderr": 0.044986763205729224 }, "harness|hendrycksTest-marketing|5": { "acc": 0.23931623931623933, "acc_stderr": 0.027951826808924333, "acc_norm": 0.23931623931623933, "acc_norm_stderr": 0.027951826808924333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.04461960433384741, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384741 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2771392081736909, "acc_stderr": 0.01600563629412243, "acc_norm": 0.2771392081736909, "acc_norm_stderr": 0.01600563629412243 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.24277456647398843, "acc_stderr": 0.0230836585869842, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.0230836585869842 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24022346368715083, "acc_stderr": 0.014288343803925308, "acc_norm": 0.24022346368715083, "acc_norm_stderr": 0.014288343803925308 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.24509803921568626, "acc_stderr": 0.024630048979824765, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.024630048979824765 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.3215434083601286, "acc_stderr": 0.026527724079528872, "acc_norm": 0.3215434083601286, "acc_norm_stderr": 0.026527724079528872 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2654320987654321, "acc_stderr": 
0.024569223600460845, "acc_norm": 0.2654320987654321, "acc_norm_stderr": 0.024569223600460845 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2695035460992908, "acc_stderr": 0.026469036818590634, "acc_norm": 0.2695035460992908, "acc_norm_stderr": 0.026469036818590634 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.25554106910039115, "acc_stderr": 0.01113985783359852, "acc_norm": 0.25554106910039115, "acc_norm_stderr": 0.01113985783359852 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.20220588235294118, "acc_stderr": 0.02439819298665492, "acc_norm": 0.20220588235294118, "acc_norm_stderr": 0.02439819298665492 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.27450980392156865, "acc_stderr": 0.018054027458815194, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.018054027458815194 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.3090909090909091, "acc_stderr": 0.044262946482000985, "acc_norm": 0.3090909090909091, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.23265306122448978, "acc_stderr": 0.02704925791589618, "acc_norm": 0.23265306122448978, "acc_norm_stderr": 0.02704925791589618 }, "harness|hendrycksTest-sociology|5": { "acc": 0.21393034825870647, "acc_stderr": 0.02899690969332891, "acc_norm": 0.21393034825870647, "acc_norm_stderr": 0.02899690969332891 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-virology|5": { "acc": 0.2710843373493976, "acc_stderr": 0.03460579907553026, "acc_norm": 0.2710843373493976, "acc_norm_stderr": 0.03460579907553026 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.21637426900584794, "acc_stderr": 0.03158149539338734, "acc_norm": 0.21637426900584794, "acc_norm_stderr": 0.03158149539338734 }, "harness|truthfulqa:mc|0": { "mc1": 0.21909424724602203, "mc1_stderr": 0.014480038578757442, "mc2": 0.3832145699748184, "mc2_stderr": 0.015352373020404608 }, "harness|winogrande|5": { "acc": 0.5659037095501184, "acc_stderr": 0.013929882555694054 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
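As a small usage addendum (not part of the original evaluation artifacts): the per-task configurations and the aggregated "results" configuration described above can be explored with the standard `datasets` API. The sketch below assumes only the config and split names listed in this card and its metadata ("results", "latest", and per-task configs such as `harness_hellaswag_10`); adjust them if the repository layout changes.

```python
from datasets import get_dataset_config_names, load_dataset

repo = "open-llm-leaderboard/details_Josephgflowers__TinyLlama-3T-Cinder-v1"

# One config per evaluated task, plus the aggregated "results" config.
configs = get_dataset_config_names(repo)
print(f"{len(configs)} configurations available")

# The "latest" split of the "results" config holds the aggregated metrics
# shown in the "Latest results" section above.
results = load_dataset(repo, "results", split="latest")
print(results[0])
```

Loading the details for a single task (for example `harness_hellaswag_10`) works the same way, swapping in the corresponding config name.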
open-llm-leaderboard/details_Josephgflowers__TinyLlama-3T-Cinder-v1
[ "region:us" ]
2024-01-04T13:04:59+00:00
{"pretty_name": "Evaluation run of Josephgflowers/TinyLlama-3T-Cinder-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [Josephgflowers/TinyLlama-3T-Cinder-v1](https://huggingface.co/Josephgflowers/TinyLlama-3T-Cinder-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Josephgflowers__TinyLlama-3T-Cinder-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:03:09.037379](https://huggingface.co/datasets/open-llm-leaderboard/details_Josephgflowers__TinyLlama-3T-Cinder-v1/blob/main/results_2024-01-04T13-03-09.037379.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.26371678954522415,\n \"acc_stderr\": 0.031150234725395142,\n \"acc_norm\": 0.2650207569978929,\n \"acc_norm_stderr\": 0.03197810250604355,\n \"mc1\": 0.21909424724602203,\n \"mc1_stderr\": 0.014480038578757442,\n \"mc2\": 0.3832145699748184,\n \"mc2_stderr\": 0.015352373020404608\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.3054607508532423,\n \"acc_stderr\": 0.013460080478002505,\n \"acc_norm\": 0.33532423208191126,\n \"acc_norm_stderr\": 0.013796182947785568\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.37801234813782114,\n \"acc_stderr\": 0.004838997427699743,\n \"acc_norm\": 0.46355307707627963,\n \"acc_norm_stderr\": 0.004976507121076259\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2518518518518518,\n \"acc_stderr\": 0.03749850709174022,\n \"acc_norm\": 0.2518518518518518,\n \"acc_norm_stderr\": 0.03749850709174022\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.2565789473684211,\n \"acc_stderr\": 0.0355418036802569,\n \"acc_norm\": 0.2565789473684211,\n \"acc_norm_stderr\": 0.0355418036802569\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.26037735849056604,\n \"acc_stderr\": 0.02700876609070809,\n \"acc_norm\": 0.26037735849056604,\n \"acc_norm_stderr\": 0.02700876609070809\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.03745554791462457,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.03745554791462457\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.041633319989322695,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.041633319989322695\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.1676300578034682,\n \"acc_stderr\": 0.028481963032143395,\n \"acc_norm\": 0.1676300578034682,\n \"acc_norm_stderr\": 0.028481963032143395\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.04220773659171453,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.04220773659171453\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2765957446808511,\n \"acc_stderr\": 0.0292418838696288,\n \"acc_norm\": 0.2765957446808511,\n \"acc_norm_stderr\": 0.0292418838696288\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2982456140350877,\n \"acc_stderr\": 0.04303684033537315,\n \"acc_norm\": 0.2982456140350877,\n \"acc_norm_stderr\": 0.04303684033537315\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2689655172413793,\n \"acc_stderr\": 0.036951833116502325,\n \"acc_norm\": 0.2689655172413793,\n \"acc_norm_stderr\": 0.036951833116502325\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 0.023517294335963286,\n \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.023517294335963286\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.2619047619047619,\n \"acc_stderr\": 0.03932537680392869,\n \"acc_norm\": 0.2619047619047619,\n \"acc_norm_stderr\": 0.03932537680392869\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.2903225806451613,\n \"acc_stderr\": 0.02582210611941589,\n \"acc_norm\": 0.2903225806451613,\n \"acc_norm_stderr\": 0.02582210611941589\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.32019704433497537,\n \"acc_stderr\": 0.032826493853041504,\n \"acc_norm\": 0.32019704433497537,\n \"acc_norm_stderr\": 0.032826493853041504\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.21,\n \"acc_stderr\": 0.040936018074033256,\n \"acc_norm\": 0.21,\n \"acc_norm_stderr\": 0.040936018074033256\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.26666666666666666,\n \"acc_stderr\": 0.03453131801885416,\n \"acc_norm\": 0.26666666666666666,\n \"acc_norm_stderr\": 0.03453131801885416\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.31313131313131315,\n \"acc_stderr\": 0.03304205087813652,\n \"acc_norm\": 0.31313131313131315,\n \"acc_norm_stderr\": 0.03304205087813652\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.2694300518134715,\n \"acc_stderr\": 0.03201867122877793,\n \"acc_norm\": 0.2694300518134715,\n 
\"acc_norm_stderr\": 0.03201867122877793\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.21794871794871795,\n \"acc_stderr\": 0.02093244577446319,\n \"acc_norm\": 0.21794871794871795,\n \"acc_norm_stderr\": 0.02093244577446319\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.23703703703703705,\n \"acc_stderr\": 0.025928876132766118,\n \"acc_norm\": 0.23703703703703705,\n \"acc_norm_stderr\": 0.025928876132766118\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.027553614467863797,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.027553614467863797\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.24503311258278146,\n \"acc_stderr\": 0.03511807571804723,\n \"acc_norm\": 0.24503311258278146,\n \"acc_norm_stderr\": 0.03511807571804723\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.23302752293577983,\n \"acc_stderr\": 0.018125669180861493,\n \"acc_norm\": 0.23302752293577983,\n \"acc_norm_stderr\": 0.018125669180861493\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.03167468706828979,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.03167468706828979\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.02977177522814563,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.02977177522814563\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.24472573839662448,\n \"acc_stderr\": 0.027985699387036423,\n \"acc_norm\": 0.24472573839662448,\n \"acc_norm_stderr\": 0.027985699387036423\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.3094170403587444,\n \"acc_stderr\": 0.031024411740572213,\n \"acc_norm\": 0.3094170403587444,\n \"acc_norm_stderr\": 0.031024411740572213\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.26717557251908397,\n \"acc_stderr\": 0.03880848301082396,\n \"acc_norm\": 0.26717557251908397,\n \"acc_norm_stderr\": 0.03880848301082396\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.30578512396694213,\n \"acc_stderr\": 0.04205953933884124,\n \"acc_norm\": 0.30578512396694213,\n \"acc_norm_stderr\": 0.04205953933884124\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.04133119440243839,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.04133119440243839\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.3067484662576687,\n \"acc_stderr\": 0.036230899157241474,\n \"acc_norm\": 0.3067484662576687,\n \"acc_norm_stderr\": 0.036230899157241474\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.20535714285714285,\n \"acc_stderr\": 0.03834241021419072,\n \"acc_norm\": 0.20535714285714285,\n \"acc_norm_stderr\": 0.03834241021419072\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.2912621359223301,\n \"acc_stderr\": 0.044986763205729224,\n \"acc_norm\": 0.2912621359223301,\n \"acc_norm_stderr\": 0.044986763205729224\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.23931623931623933,\n \"acc_stderr\": 0.027951826808924333,\n \"acc_norm\": 0.23931623931623933,\n \"acc_norm_stderr\": 0.027951826808924333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384741,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 
0.04461960433384741\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2771392081736909,\n \"acc_stderr\": 0.01600563629412243,\n \"acc_norm\": 0.2771392081736909,\n \"acc_norm_stderr\": 0.01600563629412243\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.24277456647398843,\n \"acc_stderr\": 0.0230836585869842,\n \"acc_norm\": 0.24277456647398843,\n \"acc_norm_stderr\": 0.0230836585869842\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24022346368715083,\n \"acc_stderr\": 0.014288343803925308,\n \"acc_norm\": 0.24022346368715083,\n \"acc_norm_stderr\": 0.014288343803925308\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.024630048979824765,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.024630048979824765\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.3215434083601286,\n \"acc_stderr\": 0.026527724079528872,\n \"acc_norm\": 0.3215434083601286,\n \"acc_norm_stderr\": 0.026527724079528872\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2654320987654321,\n \"acc_stderr\": 0.024569223600460845,\n \"acc_norm\": 0.2654320987654321,\n \"acc_norm_stderr\": 0.024569223600460845\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2695035460992908,\n \"acc_stderr\": 0.026469036818590634,\n \"acc_norm\": 0.2695035460992908,\n \"acc_norm_stderr\": 0.026469036818590634\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.25554106910039115,\n \"acc_stderr\": 0.01113985783359852,\n \"acc_norm\": 0.25554106910039115,\n \"acc_norm_stderr\": 0.01113985783359852\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.20220588235294118,\n \"acc_stderr\": 0.02439819298665492,\n \"acc_norm\": 0.20220588235294118,\n \"acc_norm_stderr\": 0.02439819298665492\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.018054027458815194,\n \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.018054027458815194\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.3090909090909091,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.3090909090909091,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.23265306122448978,\n \"acc_stderr\": 0.02704925791589618,\n \"acc_norm\": 0.23265306122448978,\n \"acc_norm_stderr\": 0.02704925791589618\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.21393034825870647,\n \"acc_stderr\": 0.02899690969332891,\n \"acc_norm\": 0.21393034825870647,\n \"acc_norm_stderr\": 0.02899690969332891\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.2710843373493976,\n \"acc_stderr\": 0.03460579907553026,\n \"acc_norm\": 0.2710843373493976,\n \"acc_norm_stderr\": 0.03460579907553026\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.21637426900584794,\n \"acc_stderr\": 0.03158149539338734,\n \"acc_norm\": 0.21637426900584794,\n \"acc_norm_stderr\": 0.03158149539338734\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.21909424724602203,\n \"mc1_stderr\": 0.014480038578757442,\n \"mc2\": 0.3832145699748184,\n \"mc2_stderr\": 0.015352373020404608\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5659037095501184,\n \"acc_stderr\": 0.013929882555694054\n 
},\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/Josephgflowers/TinyLlama-3T-Cinder-v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-03-09.037379.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-03-09.037379.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-03-09.037379.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-03-09.037379.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-03-09.037379.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["**/details_harness|winogrande|5_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-04T13-03-09.037379.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T13_03_09.037379", "path": ["results_2024-01-04T13-03-09.037379.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T13-03-09.037379.parquet"]}]}]}
2024-01-04T13:05:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Josephgflowers/TinyLlama-3T-Cinder-v1 Dataset automatically created during the evaluation run of model Josephgflowers/TinyLlama-3T-Cinder-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T13:03:09.037379 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
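A minimal sketch of the load referenced just above, since the snippet itself does not appear in this copy of the card. The repository id is an assumption based on the leaderboard's usual `open-llm-leaderboard/details_<org>__<model>` naming; the `harness_winogrande_5` configuration name is taken from the metadata listed earlier in this record.

```python
from datasets import load_dataset

# Minimal sketch: load one task configuration of the evaluation details.
# The repository id below is an assumption following the leaderboard's
# "details_<org>__<model>" convention for Josephgflowers/TinyLlama-3T-Cinder-v1.
data = load_dataset(
    "open-llm-leaderboard/details_Josephgflowers__TinyLlama-3T-Cinder-v1",
    "harness_winogrande_5",
    split="train",
)
print(data)
```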
[ "# Dataset Card for Evaluation run of Josephgflowers/TinyLlama-3T-Cinder-v1\n\n\n\nDataset automatically created during the evaluation run of model Josephgflowers/TinyLlama-3T-Cinder-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:03:09.037379(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Josephgflowers/TinyLlama-3T-Cinder-v1\n\n\n\nDataset automatically created during the evaluation run of model Josephgflowers/TinyLlama-3T-Cinder-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:03:09.037379(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Josephgflowers/TinyLlama-3T-Cinder-v1\n\n\n\nDataset automatically created during the evaluation run of model Josephgflowers/TinyLlama-3T-Cinder-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:03:09.037379(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
712f63586c6b0caaf85d5becdbf77f2e03b7ed94
# Dataset Card for Evaluation run of bn22/tinyllama_frankenmerge <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [bn22/tinyllama_frankenmerge](https://huggingface.co/bn22/tinyllama_frankenmerge) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_bn22__tinyllama_frankenmerge", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:03:17.010192](https://huggingface.co/datasets/open-llm-leaderboard/details_bn22__tinyllama_frankenmerge/blob/main/results_2024-01-04T13-03-17.010192.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2646989243388527, "acc_stderr": 0.031102353164814696, "acc_norm": 0.265973406703187, "acc_norm_stderr": 0.03187459379138897, "mc1": 0.24969400244798043, "mc1_stderr": 0.015152286907148125, "mc2": 0.4018024343557987, "mc2_stderr": 0.01497649522857516 }, "harness|arc:challenge|25": { "acc": 0.2636518771331058, "acc_stderr": 0.012875929151297049, "acc_norm": 0.30204778156996587, "acc_norm_stderr": 0.013417519144716417 }, "harness|hellaswag|10": { "acc": 0.3995220075682135, "acc_stderr": 0.004887991225950274, "acc_norm": 0.5100577574188409, "acc_norm_stderr": 0.0049887717918545215 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.23703703703703705, "acc_stderr": 0.03673731683969506, "acc_norm": 0.23703703703703705, "acc_norm_stderr": 0.03673731683969506 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.19078947368421054, "acc_stderr": 0.031975658210325, "acc_norm": 0.19078947368421054, "acc_norm_stderr": 0.031975658210325 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542126, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542126 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.27547169811320754, "acc_stderr": 0.02749566368372407, "acc_norm": 0.27547169811320754, "acc_norm_stderr": 0.02749566368372407 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.25, "acc_stderr": 0.03621034121889507, "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.24277456647398843, "acc_stderr": 0.0326926380614177, "acc_norm": 0.24277456647398843, "acc_norm_stderr": 0.0326926380614177 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.19607843137254902, "acc_stderr": 0.03950581861179962, "acc_norm": 0.19607843137254902, "acc_norm_stderr": 0.03950581861179962 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2553191489361702, "acc_stderr": 0.02850485647051419, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.02850485647051419 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.22807017543859648, "acc_stderr": 0.03947152782669415, "acc_norm": 0.22807017543859648, "acc_norm_stderr": 0.03947152782669415 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2, "acc_stderr": 0.033333333333333284, "acc_norm": 0.2, "acc_norm_stderr": 0.033333333333333284 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2804232804232804, "acc_stderr": 0.023135287974325635, "acc_norm": 0.2804232804232804, "acc_norm_stderr": 0.023135287974325635 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.18253968253968253, "acc_stderr": 0.03455071019102148, "acc_norm": 0.18253968253968253, "acc_norm_stderr": 0.03455071019102148 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.1967741935483871, "acc_stderr": 0.022616409420742018, "acc_norm": 0.1967741935483871, "acc_norm_stderr": 0.022616409420742018 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2019704433497537, "acc_stderr": 0.02824735012218027, "acc_norm": 0.2019704433497537, "acc_norm_stderr": 0.02824735012218027 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.27, "acc_stderr": 0.04461960433384739, "acc_norm": 0.27, "acc_norm_stderr": 0.04461960433384739 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2606060606060606, "acc_stderr": 0.03427743175816524, "acc_norm": 0.2606060606060606, "acc_norm_stderr": 0.03427743175816524 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.25757575757575757, "acc_stderr": 0.031156269519646847, "acc_norm": 0.25757575757575757, "acc_norm_stderr": 0.031156269519646847 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.29533678756476683, "acc_stderr": 0.032922966391551386, "acc_norm": 0.29533678756476683, "acc_norm_stderr": 0.032922966391551386 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.36153846153846153, "acc_stderr": 0.024359581465396987, "acc_norm": 0.36153846153846153, "acc_norm_stderr": 0.024359581465396987 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.22962962962962963, "acc_stderr": 0.02564410863926763, "acc_norm": 0.22962962962962963, "acc_norm_stderr": 0.02564410863926763 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.2647058823529412, "acc_stderr": 0.02865749128507198, "acc_norm": 0.2647058823529412, "acc_norm_stderr": 0.02865749128507198 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.25165562913907286, "acc_stderr": 0.03543304234389985, 
"acc_norm": 0.25165562913907286, "acc_norm_stderr": 0.03543304234389985 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.22752293577981653, "acc_stderr": 0.017974463578776502, "acc_norm": 0.22752293577981653, "acc_norm_stderr": 0.017974463578776502 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.25, "acc_stderr": 0.03039153369274154, "acc_norm": 0.25, "acc_norm_stderr": 0.03039153369274154 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.270042194092827, "acc_stderr": 0.028900721906293433, "acc_norm": 0.270042194092827, "acc_norm_stderr": 0.028900721906293433 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.35874439461883406, "acc_stderr": 0.032190792004199956, "acc_norm": 0.35874439461883406, "acc_norm_stderr": 0.032190792004199956 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.22137404580152673, "acc_stderr": 0.03641297081313729, "acc_norm": 0.22137404580152673, "acc_norm_stderr": 0.03641297081313729 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2396694214876033, "acc_stderr": 0.03896878985070417, "acc_norm": 0.2396694214876033, "acc_norm_stderr": 0.03896878985070417 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25, "acc_stderr": 0.04186091791394607, "acc_norm": 0.25, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2883435582822086, "acc_stderr": 0.035590395316173425, "acc_norm": 0.2883435582822086, "acc_norm_stderr": 0.035590395316173425 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.3125, "acc_stderr": 0.043994650575715215, "acc_norm": 0.3125, "acc_norm_stderr": 0.043994650575715215 }, "harness|hendrycksTest-management|5": { "acc": 0.1941747572815534, "acc_stderr": 0.03916667762822586, "acc_norm": 0.1941747572815534, "acc_norm_stderr": 0.03916667762822586 }, "harness|hendrycksTest-marketing|5": { "acc": 0.23504273504273504, "acc_stderr": 0.02777883590493543, "acc_norm": 0.23504273504273504, "acc_norm_stderr": 0.02777883590493543 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2515964240102171, "acc_stderr": 0.015517322365529615, "acc_norm": 0.2515964240102171, "acc_norm_stderr": 0.015517322365529615 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.25722543352601157, "acc_stderr": 0.023532925431044276, "acc_norm": 0.25722543352601157, "acc_norm_stderr": 0.023532925431044276 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.23798882681564246, "acc_stderr": 0.014242630070574915, "acc_norm": 0.23798882681564246, "acc_norm_stderr": 0.014242630070574915 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.2549019607843137, "acc_stderr": 0.024954184324879912, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.024954184324879912 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2604501607717042, "acc_stderr": 0.024926723224845557, "acc_norm": 0.2604501607717042, "acc_norm_stderr": 0.024926723224845557 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.25925925925925924, "acc_stderr": 0.02438366553103545, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.02438366553103545 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.23404255319148937, "acc_stderr": 0.025257861359432417, 
"acc_norm": 0.23404255319148937, "acc_norm_stderr": 0.025257861359432417 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2405475880052151, "acc_stderr": 0.010916406735478949, "acc_norm": 0.2405475880052151, "acc_norm_stderr": 0.010916406735478949 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.44485294117647056, "acc_stderr": 0.030187532060329376, "acc_norm": 0.44485294117647056, "acc_norm_stderr": 0.030187532060329376 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.21241830065359477, "acc_stderr": 0.016547148636203147, "acc_norm": 0.21241830065359477, "acc_norm_stderr": 0.016547148636203147 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.23636363636363636, "acc_stderr": 0.04069306319721377, "acc_norm": 0.23636363636363636, "acc_norm_stderr": 0.04069306319721377 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.1836734693877551, "acc_stderr": 0.024789071332007643, "acc_norm": 0.1836734693877551, "acc_norm_stderr": 0.024789071332007643 }, "harness|hendrycksTest-sociology|5": { "acc": 0.23880597014925373, "acc_stderr": 0.03014777593540922, "acc_norm": 0.23880597014925373, "acc_norm_stderr": 0.03014777593540922 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-virology|5": { "acc": 0.27710843373493976, "acc_stderr": 0.034843315926805875, "acc_norm": 0.27710843373493976, "acc_norm_stderr": 0.034843315926805875 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.2631578947368421, "acc_stderr": 0.03377310252209194, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.03377310252209194 }, "harness|truthfulqa:mc|0": { "mc1": 0.24969400244798043, "mc1_stderr": 0.015152286907148125, "mc2": 0.4018024343557987, "mc2_stderr": 0.01497649522857516 }, "harness|winogrande|5": { "acc": 0.5872138910812944, "acc_stderr": 0.0138370606486821 }, "harness|gsm8k|5": { "acc": 0.01592115238817286, "acc_stderr": 0.003447819272389016 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
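As the card notes, an additional "results" configuration stores the aggregated metrics of the run, with a "latest" split pointing at the most recent results. A minimal sketch of reading those aggregates, assuming the configuration and split names follow the convention described in the card; the exact column layout of the results parquet is not specified here.

```python
from datasets import load_dataset

# Minimal sketch: read the aggregated metrics stored in the "results"
# configuration; "latest" is the split convention described in the card.
results = load_dataset(
    "open-llm-leaderboard/details_bn22__tinyllama_frankenmerge",
    "results",
    split="latest",
)
# Each row holds the aggregated scores of one run; column names may vary.
print(results[0])
```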
open-llm-leaderboard/details_bn22__tinyllama_frankenmerge
[ "region:us" ]
2024-01-04T13:05:07+00:00
{"pretty_name": "Evaluation run of bn22/tinyllama_frankenmerge", "dataset_summary": "Dataset automatically created during the evaluation run of model [bn22/tinyllama_frankenmerge](https://huggingface.co/bn22/tinyllama_frankenmerge) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_bn22__tinyllama_frankenmerge\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:03:17.010192](https://huggingface.co/datasets/open-llm-leaderboard/details_bn22__tinyllama_frankenmerge/blob/main/results_2024-01-04T13-03-17.010192.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2646989243388527,\n \"acc_stderr\": 0.031102353164814696,\n \"acc_norm\": 0.265973406703187,\n \"acc_norm_stderr\": 0.03187459379138897,\n \"mc1\": 0.24969400244798043,\n \"mc1_stderr\": 0.015152286907148125,\n \"mc2\": 0.4018024343557987,\n \"mc2_stderr\": 0.01497649522857516\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.2636518771331058,\n \"acc_stderr\": 0.012875929151297049,\n \"acc_norm\": 0.30204778156996587,\n \"acc_norm_stderr\": 0.013417519144716417\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.3995220075682135,\n \"acc_stderr\": 0.004887991225950274,\n \"acc_norm\": 0.5100577574188409,\n \"acc_norm_stderr\": 0.0049887717918545215\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.23703703703703705,\n \"acc_stderr\": 0.03673731683969506,\n \"acc_norm\": 0.23703703703703705,\n \"acc_norm_stderr\": 0.03673731683969506\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.19078947368421054,\n \"acc_stderr\": 0.031975658210325,\n \"acc_norm\": 0.19078947368421054,\n \"acc_norm_stderr\": 0.031975658210325\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542126,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542126\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.27547169811320754,\n \"acc_stderr\": 0.02749566368372407,\n \"acc_norm\": 0.27547169811320754,\n \"acc_norm_stderr\": 0.02749566368372407\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03621034121889507\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n 
\"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24277456647398843,\n \"acc_stderr\": 0.0326926380614177,\n \"acc_norm\": 0.24277456647398843,\n \"acc_norm_stderr\": 0.0326926380614177\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.19607843137254902,\n \"acc_stderr\": 0.03950581861179962,\n \"acc_norm\": 0.19607843137254902,\n \"acc_norm_stderr\": 0.03950581861179962\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2553191489361702,\n \"acc_stderr\": 0.02850485647051419,\n \"acc_norm\": 0.2553191489361702,\n \"acc_norm_stderr\": 0.02850485647051419\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.22807017543859648,\n \"acc_stderr\": 0.03947152782669415,\n \"acc_norm\": 0.22807017543859648,\n \"acc_norm_stderr\": 0.03947152782669415\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.033333333333333284,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.033333333333333284\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2804232804232804,\n \"acc_stderr\": 0.023135287974325635,\n \"acc_norm\": 0.2804232804232804,\n \"acc_norm_stderr\": 0.023135287974325635\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.18253968253968253,\n \"acc_stderr\": 0.03455071019102148,\n \"acc_norm\": 0.18253968253968253,\n \"acc_norm_stderr\": 0.03455071019102148\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.1967741935483871,\n \"acc_stderr\": 0.022616409420742018,\n \"acc_norm\": 0.1967741935483871,\n \"acc_norm_stderr\": 0.022616409420742018\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2019704433497537,\n \"acc_stderr\": 0.02824735012218027,\n \"acc_norm\": 0.2019704433497537,\n \"acc_norm_stderr\": 0.02824735012218027\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.04461960433384739,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.04461960433384739\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2606060606060606,\n \"acc_stderr\": 0.03427743175816524,\n \"acc_norm\": 0.2606060606060606,\n \"acc_norm_stderr\": 0.03427743175816524\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.25757575757575757,\n \"acc_stderr\": 0.031156269519646847,\n \"acc_norm\": 0.25757575757575757,\n \"acc_norm_stderr\": 0.031156269519646847\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.29533678756476683,\n \"acc_stderr\": 0.032922966391551386,\n \"acc_norm\": 0.29533678756476683,\n \"acc_norm_stderr\": 0.032922966391551386\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.36153846153846153,\n 
\"acc_stderr\": 0.024359581465396987,\n \"acc_norm\": 0.36153846153846153,\n \"acc_norm_stderr\": 0.024359581465396987\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.22962962962962963,\n \"acc_stderr\": 0.02564410863926763,\n \"acc_norm\": 0.22962962962962963,\n \"acc_norm_stderr\": 0.02564410863926763\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.2647058823529412,\n \"acc_stderr\": 0.02865749128507198,\n \"acc_norm\": 0.2647058823529412,\n \"acc_norm_stderr\": 0.02865749128507198\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.25165562913907286,\n \"acc_stderr\": 0.03543304234389985,\n \"acc_norm\": 0.25165562913907286,\n \"acc_norm_stderr\": 0.03543304234389985\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.22752293577981653,\n \"acc_stderr\": 0.017974463578776502,\n \"acc_norm\": 0.22752293577981653,\n \"acc_norm_stderr\": 0.017974463578776502\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03039153369274154,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03039153369274154\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.270042194092827,\n \"acc_stderr\": 0.028900721906293433,\n \"acc_norm\": 0.270042194092827,\n \"acc_norm_stderr\": 0.028900721906293433\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.35874439461883406,\n \"acc_stderr\": 0.032190792004199956,\n \"acc_norm\": 0.35874439461883406,\n \"acc_norm_stderr\": 0.032190792004199956\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.22137404580152673,\n \"acc_stderr\": 0.03641297081313729,\n \"acc_norm\": 0.22137404580152673,\n \"acc_norm_stderr\": 0.03641297081313729\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2396694214876033,\n \"acc_stderr\": 0.03896878985070417,\n \"acc_norm\": 0.2396694214876033,\n \"acc_norm_stderr\": 0.03896878985070417\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2883435582822086,\n \"acc_stderr\": 0.035590395316173425,\n \"acc_norm\": 0.2883435582822086,\n \"acc_norm_stderr\": 0.035590395316173425\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.3125,\n \"acc_stderr\": 0.043994650575715215,\n \"acc_norm\": 0.3125,\n \"acc_norm_stderr\": 0.043994650575715215\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.1941747572815534,\n \"acc_stderr\": 0.03916667762822586,\n \"acc_norm\": 0.1941747572815534,\n \"acc_norm_stderr\": 0.03916667762822586\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.23504273504273504,\n \"acc_stderr\": 0.02777883590493543,\n \"acc_norm\": 0.23504273504273504,\n \"acc_norm_stderr\": 0.02777883590493543\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2515964240102171,\n \"acc_stderr\": 0.015517322365529615,\n \"acc_norm\": 0.2515964240102171,\n \"acc_norm_stderr\": 0.015517322365529615\n },\n 
\"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.25722543352601157,\n \"acc_stderr\": 0.023532925431044276,\n \"acc_norm\": 0.25722543352601157,\n \"acc_norm_stderr\": 0.023532925431044276\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.23798882681564246,\n \"acc_stderr\": 0.014242630070574915,\n \"acc_norm\": 0.23798882681564246,\n \"acc_norm_stderr\": 0.014242630070574915\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.024954184324879912,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.024954184324879912\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2604501607717042,\n \"acc_stderr\": 0.024926723224845557,\n \"acc_norm\": 0.2604501607717042,\n \"acc_norm_stderr\": 0.024926723224845557\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.02438366553103545,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.02438366553103545\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.23404255319148937,\n \"acc_stderr\": 0.025257861359432417,\n \"acc_norm\": 0.23404255319148937,\n \"acc_norm_stderr\": 0.025257861359432417\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2405475880052151,\n \"acc_stderr\": 0.010916406735478949,\n \"acc_norm\": 0.2405475880052151,\n \"acc_norm_stderr\": 0.010916406735478949\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.44485294117647056,\n \"acc_stderr\": 0.030187532060329376,\n \"acc_norm\": 0.44485294117647056,\n \"acc_norm_stderr\": 0.030187532060329376\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.21241830065359477,\n \"acc_stderr\": 0.016547148636203147,\n \"acc_norm\": 0.21241830065359477,\n \"acc_norm_stderr\": 0.016547148636203147\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.23636363636363636,\n \"acc_stderr\": 0.04069306319721377,\n \"acc_norm\": 0.23636363636363636,\n \"acc_norm_stderr\": 0.04069306319721377\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.1836734693877551,\n \"acc_stderr\": 0.024789071332007643,\n \"acc_norm\": 0.1836734693877551,\n \"acc_norm_stderr\": 0.024789071332007643\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23880597014925373,\n \"acc_stderr\": 0.03014777593540922,\n \"acc_norm\": 0.23880597014925373,\n \"acc_norm_stderr\": 0.03014777593540922\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.27710843373493976,\n \"acc_stderr\": 0.034843315926805875,\n \"acc_norm\": 0.27710843373493976,\n \"acc_norm_stderr\": 0.034843315926805875\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.2631578947368421,\n \"acc_stderr\": 0.03377310252209194,\n \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.03377310252209194\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.24969400244798043,\n \"mc1_stderr\": 0.015152286907148125,\n \"mc2\": 0.4018024343557987,\n \"mc2_stderr\": 0.01497649522857516\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5872138910812944,\n \"acc_stderr\": 0.0138370606486821\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.01592115238817286,\n \"acc_stderr\": 0.003447819272389016\n }\n}\n```", "repo_url": "https://huggingface.co/bn22/tinyllama_frankenmerge", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-03-17.010192.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-03-17.010192.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-03-17.010192.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-03-17.010192.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-03-17.010192.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-03-17.010192.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["**/details_harness|winogrande|5_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T13-03-17.010192.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T13_03_17.010192", "path": ["results_2024-01-04T13-03-17.010192.parquet"]}, {"split": "latest", "path": 
["results_2024-01-04T13-03-17.010192.parquet"]}]}]}
2024-01-04T13:05:31+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of bn22/tinyllama_frankenmerge Dataset automatically created during the evaluation run of model bn22/tinyllama_frankenmerge on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T13:03:17.010192 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of bn22/tinyllama_frankenmerge\n\n\n\nDataset automatically created during the evaluation run of model bn22/tinyllama_frankenmerge on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:03:17.010192(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of bn22/tinyllama_frankenmerge\n\n\n\nDataset automatically created during the evaluation run of model bn22/tinyllama_frankenmerge on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:03:17.010192(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 185, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of bn22/tinyllama_frankenmerge\n\n\n\nDataset automatically created during the evaluation run of model bn22/tinyllama_frankenmerge on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:03:17.010192(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
62f92d5020966ab2234240af0bfced691a5bca62
# Dataset Card for Evaluation run of TomGrc/FusionNet_linear <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [TomGrc/FusionNet_linear](https://huggingface.co/TomGrc/FusionNet_linear) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TomGrc__FusionNet_linear", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:05:33.197090](https://huggingface.co/datasets/open-llm-leaderboard/details_TomGrc__FusionNet_linear/blob/main/results_2024-01-04T13-05-33.197090.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6672525697360521, "acc_stderr": 0.031625508279826374, "acc_norm": 0.6680745258089933, "acc_norm_stderr": 0.032268986108869066, "mc1": 0.5703794369645043, "mc1_stderr": 0.017329234580409095, "mc2": 0.7194464363694983, "mc2_stderr": 0.015001159262384656 }, "harness|arc:challenge|25": { "acc": 0.6834470989761092, "acc_stderr": 0.013592431519068079, "acc_norm": 0.712457337883959, "acc_norm_stderr": 0.013226719056266125 }, "harness|hellaswag|10": { "acc": 0.7132045409281019, "acc_stderr": 0.004513409114983827, "acc_norm": 0.8843855805616411, "acc_norm_stderr": 0.003191084792793155 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.44, "acc_stderr": 0.0498887651569859, "acc_norm": 0.44, "acc_norm_stderr": 0.0498887651569859 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.756578947368421, "acc_stderr": 0.034923496688842384, "acc_norm": 0.756578947368421, "acc_norm_stderr": 0.034923496688842384 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.74, "acc_stderr": 0.0440844002276808, "acc_norm": 0.74, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.02863723563980089, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.02863723563980089 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956913, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956913 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.625531914893617, "acc_stderr": 0.03163910665367291, "acc_norm": 0.625531914893617, "acc_norm_stderr": 0.03163910665367291 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6275862068965518, "acc_stderr": 0.04028731532947558, "acc_norm": 0.6275862068965518, "acc_norm_stderr": 0.04028731532947558 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4973544973544973, "acc_stderr": 0.02575094967813039, "acc_norm": 0.4973544973544973, "acc_norm_stderr": 0.02575094967813039 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8193548387096774, "acc_stderr": 0.021886178567172534, "acc_norm": 0.8193548387096774, "acc_norm_stderr": 0.021886178567172534 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.03517945038691063, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8121212121212121, "acc_stderr": 0.03050193405942914, "acc_norm": 0.8121212121212121, "acc_norm_stderr": 0.03050193405942914 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8686868686868687, "acc_stderr": 0.024063156416822516, "acc_norm": 0.8686868686868687, "acc_norm_stderr": 0.024063156416822516 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603348, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603348 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563976, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563976 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37037037037037035, "acc_stderr": 0.02944316932303154, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.02944316932303154 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7142857142857143, "acc_stderr": 0.029344572500634332, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.029344572500634332 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.37748344370860926, "acc_stderr": 
0.03958027231121569, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 0.03958027231121569 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8458715596330275, "acc_stderr": 0.015480826865374308, "acc_norm": 0.8458715596330275, "acc_norm_stderr": 0.015480826865374308 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5740740740740741, "acc_stderr": 0.03372343271653062, "acc_norm": 0.5740740740740741, "acc_norm_stderr": 0.03372343271653062 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8578431372549019, "acc_stderr": 0.02450980392156862, "acc_norm": 0.8578431372549019, "acc_norm_stderr": 0.02450980392156862 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8481012658227848, "acc_stderr": 0.023363878096632446, "acc_norm": 0.8481012658227848, "acc_norm_stderr": 0.023363878096632446 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575499, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.03727673575596915, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.03727673575596915 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228733, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.03492606476623791, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.03492606476623791 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.0230866350868414, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.0230866350868414 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8045977011494253, "acc_stderr": 0.014179171373424383, "acc_norm": 0.8045977011494253, "acc_norm_stderr": 0.014179171373424383 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7543352601156069, "acc_stderr": 0.023176298203992005, "acc_norm": 0.7543352601156069, "acc_norm_stderr": 0.023176298203992005 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.39217877094972065, "acc_stderr": 0.016329061073207446, "acc_norm": 0.39217877094972065, "acc_norm_stderr": 0.016329061073207446 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.761437908496732, "acc_stderr": 0.024404394928087866, "acc_norm": 0.761437908496732, "acc_norm_stderr": 0.024404394928087866 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.729903536977492, "acc_stderr": 0.02521804037341062, "acc_norm": 0.729903536977492, "acc_norm_stderr": 0.02521804037341062 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0227797190887334, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0227797190887334 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4929078014184397, "acc_stderr": 0.02982449855912901, "acc_norm": 0.4929078014184397, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4921773142112125, "acc_stderr": 0.0127686730761119, "acc_norm": 0.4921773142112125, "acc_norm_stderr": 0.0127686730761119 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7389705882352942, "acc_stderr": 0.026679252270103128, "acc_norm": 0.7389705882352942, "acc_norm_stderr": 0.026679252270103128 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6781045751633987, "acc_stderr": 0.018901015322093092, "acc_norm": 0.6781045751633987, "acc_norm_stderr": 0.018901015322093092 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.028123429335142783, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.028123429335142783 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.030151134457776334, "acc_norm": 0.9, "acc_norm_stderr": 0.030151134457776334 }, "harness|hendrycksTest-virology|5": { "acc": 0.5843373493975904, "acc_stderr": 0.03836722176598053, "acc_norm": 0.5843373493975904, "acc_norm_stderr": 0.03836722176598053 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03188578017686398, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03188578017686398 }, "harness|truthfulqa:mc|0": { "mc1": 0.5703794369645043, "mc1_stderr": 0.017329234580409095, "mc2": 0.7194464363694983, "mc2_stderr": 0.015001159262384656 }, "harness|winogrande|5": { "acc": 0.8326756116811366, "acc_stderr": 0.010490608806828075 }, "harness|gsm8k|5": { "acc": 0.6535253980288097, "acc_stderr": 0.013107179054313398 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_TomGrc__FusionNet_linear
[ "region:us" ]
2024-01-04T13:07:49+00:00
{"pretty_name": "Evaluation run of TomGrc/FusionNet_linear", "dataset_summary": "Dataset automatically created during the evaluation run of model [TomGrc/FusionNet_linear](https://huggingface.co/TomGrc/FusionNet_linear) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TomGrc__FusionNet_linear\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:05:33.197090](https://huggingface.co/datasets/open-llm-leaderboard/details_TomGrc__FusionNet_linear/blob/main/results_2024-01-04T13-05-33.197090.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6672525697360521,\n \"acc_stderr\": 0.031625508279826374,\n \"acc_norm\": 0.6680745258089933,\n \"acc_norm_stderr\": 0.032268986108869066,\n \"mc1\": 0.5703794369645043,\n \"mc1_stderr\": 0.017329234580409095,\n \"mc2\": 0.7194464363694983,\n \"mc2_stderr\": 0.015001159262384656\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6834470989761092,\n \"acc_stderr\": 0.013592431519068079,\n \"acc_norm\": 0.712457337883959,\n \"acc_norm_stderr\": 0.013226719056266125\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7132045409281019,\n \"acc_stderr\": 0.004513409114983827,\n \"acc_norm\": 0.8843855805616411,\n \"acc_norm_stderr\": 0.003191084792793155\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.0498887651569859,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.0498887651569859\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.756578947368421,\n \"acc_stderr\": 0.034923496688842384,\n \"acc_norm\": 0.756578947368421,\n \"acc_norm_stderr\": 0.034923496688842384\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.02863723563980089,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.02863723563980089\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n 
\"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956913,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956913\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.625531914893617,\n \"acc_stderr\": 0.03163910665367291,\n \"acc_norm\": 0.625531914893617,\n \"acc_norm_stderr\": 0.03163910665367291\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6275862068965518,\n \"acc_stderr\": 0.04028731532947558,\n \"acc_norm\": 0.6275862068965518,\n \"acc_norm_stderr\": 0.04028731532947558\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4973544973544973,\n \"acc_stderr\": 0.02575094967813039,\n \"acc_norm\": 0.4973544973544973,\n \"acc_norm_stderr\": 0.02575094967813039\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8193548387096774,\n \"acc_stderr\": 0.021886178567172534,\n \"acc_norm\": 0.8193548387096774,\n \"acc_norm_stderr\": 0.021886178567172534\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8121212121212121,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.8121212121212121,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8686868686868687,\n \"acc_stderr\": 0.024063156416822516,\n \"acc_norm\": 0.8686868686868687,\n \"acc_norm_stderr\": 0.024063156416822516\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603348,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603348\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6641025641025641,\n \"acc_stderr\": 0.023946724741563976,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563976\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37037037037037035,\n \"acc_stderr\": 0.02944316932303154,\n \"acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.02944316932303154\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.029344572500634332,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.029344572500634332\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.03958027231121569,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.03958027231121569\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374308,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374308\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5740740740740741,\n \"acc_stderr\": 0.03372343271653062,\n \"acc_norm\": 0.5740740740740741,\n \"acc_norm_stderr\": 0.03372343271653062\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8578431372549019,\n \"acc_stderr\": 0.02450980392156862,\n \"acc_norm\": 0.8578431372549019,\n \"acc_norm_stderr\": 0.02450980392156862\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8481012658227848,\n \"acc_stderr\": 0.023363878096632446,\n \"acc_norm\": 0.8481012658227848,\n \"acc_norm_stderr\": 0.023363878096632446\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596915,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596915\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.03492606476623791,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.03492606476623791\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.0230866350868414,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.0230866350868414\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8045977011494253,\n \"acc_stderr\": 0.014179171373424383,\n \"acc_norm\": 
0.8045977011494253,\n \"acc_norm_stderr\": 0.014179171373424383\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7543352601156069,\n \"acc_stderr\": 0.023176298203992005,\n \"acc_norm\": 0.7543352601156069,\n \"acc_norm_stderr\": 0.023176298203992005\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.39217877094972065,\n \"acc_stderr\": 0.016329061073207446,\n \"acc_norm\": 0.39217877094972065,\n \"acc_norm_stderr\": 0.016329061073207446\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.761437908496732,\n \"acc_stderr\": 0.024404394928087866,\n \"acc_norm\": 0.761437908496732,\n \"acc_norm_stderr\": 0.024404394928087866\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.729903536977492,\n \"acc_stderr\": 0.02521804037341062,\n \"acc_norm\": 0.729903536977492,\n \"acc_norm_stderr\": 0.02521804037341062\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0227797190887334,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0227797190887334\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4921773142112125,\n \"acc_stderr\": 0.0127686730761119,\n \"acc_norm\": 0.4921773142112125,\n \"acc_norm_stderr\": 0.0127686730761119\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7389705882352942,\n \"acc_stderr\": 0.026679252270103128,\n \"acc_norm\": 0.7389705882352942,\n \"acc_norm_stderr\": 0.026679252270103128\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6781045751633987,\n \"acc_stderr\": 0.018901015322093092,\n \"acc_norm\": 0.6781045751633987,\n \"acc_norm_stderr\": 0.018901015322093092\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142783,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142783\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776334,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776334\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5843373493975904,\n \"acc_stderr\": 0.03836722176598053,\n \"acc_norm\": 0.5843373493975904,\n \"acc_norm_stderr\": 0.03836722176598053\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03188578017686398,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03188578017686398\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5703794369645043,\n \"mc1_stderr\": 0.017329234580409095,\n \"mc2\": 0.7194464363694983,\n \"mc2_stderr\": 0.015001159262384656\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8326756116811366,\n \"acc_stderr\": 0.010490608806828075\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6535253980288097,\n \"acc_stderr\": 0.013107179054313398\n }\n}\n```", "repo_url": "https://huggingface.co/TomGrc/FusionNet_linear", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-05-33.197090.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-05-33.197090.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-05-33.197090.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-05-33.197090.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-05-33.197090.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-05-33.197090.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["**/details_harness|winogrande|5_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T13-05-33.197090.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T13_05_33.197090", "path": ["results_2024-01-04T13-05-33.197090.parquet"]}, {"split": "latest", "path": 
["results_2024-01-04T13-05-33.197090.parquet"]}]}]}
2024-01-04T13:08:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TomGrc/FusionNet_linear Dataset automatically created during the evaluation run of model TomGrc/FusionNet_linear on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T13:05:33.197090 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
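The loading step referenced above ("you can for instance do the following") can be sketched as below; the repository id follows the leaderboard's usual `details_<org>__<model>` naming and is an assumption rather than a value quoted from this record, while `harness_winogrande_5` is one of the configurations listed in this record's metadata.

```python
# Minimal sketch, assuming the repo id follows the leaderboard convention
# details_<org>__<model>; "harness_winogrande_5" is one of the configurations
# listed in this record's metadata.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_TomGrc__FusionNet_linear",  # assumed repo id
    "harness_winogrande_5",                                   # per-task configuration
    split="train",                                            # "train" points to the latest results
)
```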
[ "# Dataset Card for Evaluation run of TomGrc/FusionNet_linear\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FusionNet_linear on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:05:33.197090(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TomGrc/FusionNet_linear\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FusionNet_linear on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:05:33.197090(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 181, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TomGrc/FusionNet_linear\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FusionNet_linear on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:05:33.197090(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
b9284665973eb358421ee81130fc570b04ad206b
# Dataset Card for Evaluation run of decapoda-research/Antares-11b-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [decapoda-research/Antares-11b-v1](https://huggingface.co/decapoda-research/Antares-11b-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_decapoda-research__Antares-11b-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:13:13.296577](https://huggingface.co/datasets/open-llm-leaderboard/details_decapoda-research__Antares-11b-v1/blob/main/results_2024-01-04T13-13-13.296577.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6602238370348961, "acc_stderr": 0.03146617343256451, "acc_norm": 0.6625896368336766, "acc_norm_stderr": 0.03209888448148018, "mc1": 0.36964504283965727, "mc1_stderr": 0.016898180706973884, "mc2": 0.5283649819747338, "mc2_stderr": 0.015000610527158549 }, "harness|arc:challenge|25": { "acc": 0.6083617747440273, "acc_stderr": 0.014264122124938215, "acc_norm": 0.6450511945392492, "acc_norm_stderr": 0.013983036904094089 }, "harness|hellaswag|10": { "acc": 0.6535550687114121, "acc_stderr": 0.0047486451332815725, "acc_norm": 0.8485361481776539, "acc_norm_stderr": 0.0035776774950640926 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.562962962962963, "acc_stderr": 0.04284958639753401, "acc_norm": 0.562962962962963, "acc_norm_stderr": 0.04284958639753401 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7828947368421053, "acc_stderr": 0.03355045304882924, "acc_norm": 0.7828947368421053, "acc_norm_stderr": 0.03355045304882924 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6716981132075471, "acc_stderr": 0.02890159361241178, "acc_norm": 0.6716981132075471, "acc_norm_stderr": 0.02890159361241178 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.035868792800803406, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.035868792800803406 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, 
"acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6358381502890174, "acc_stderr": 0.03669072477416906, "acc_norm": 0.6358381502890174, "acc_norm_stderr": 0.03669072477416906 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.04755129616062946, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.04755129616062946 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.625531914893617, "acc_stderr": 0.031639106653672915, "acc_norm": 0.625531914893617, "acc_norm_stderr": 0.031639106653672915 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370332, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370332 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.46825396825396826, "acc_stderr": 0.0256993528321318, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.0256993528321318 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8193548387096774, "acc_stderr": 0.021886178567172537, "acc_norm": 0.8193548387096774, "acc_norm_stderr": 0.021886178567172537 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7878787878787878, "acc_stderr": 0.03192271569548301, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.03192271569548301 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8636363636363636, "acc_stderr": 0.024450155973189835, "acc_norm": 0.8636363636363636, "acc_norm_stderr": 0.024450155973189835 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9119170984455959, "acc_stderr": 0.02045374660160103, "acc_norm": 0.9119170984455959, "acc_norm_stderr": 0.02045374660160103 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563976, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563976 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.337037037037037, "acc_stderr": 0.028820884666253252, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.028820884666253252 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7016806722689075, "acc_stderr": 0.029719142876342856, "acc_norm": 0.7016806722689075, "acc_norm_stderr": 0.029719142876342856 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.3576158940397351, "acc_stderr": 0.03913453431177258, "acc_norm": 0.3576158940397351, "acc_norm_stderr": 0.03913453431177258 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8568807339449541, "acc_stderr": 0.015014462497168583, "acc_norm": 0.8568807339449541, "acc_norm_stderr": 0.015014462497168583 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5509259259259259, "acc_stderr": 0.03392238405321617, "acc_norm": 0.5509259259259259, "acc_norm_stderr": 0.03392238405321617 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8578431372549019, "acc_stderr": 0.024509803921568624, "acc_norm": 0.8578431372549019, "acc_norm_stderr": 0.024509803921568624 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8818565400843882, "acc_stderr": 0.021011052659878463, "acc_norm": 0.8818565400843882, "acc_norm_stderr": 0.021011052659878463 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7174887892376681, "acc_stderr": 0.03021683101150877, "acc_norm": 0.7174887892376681, "acc_norm_stderr": 0.03021683101150877 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.03727673575596914, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.03727673575596914 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7962962962962963, "acc_stderr": 0.03893542518824847, "acc_norm": 0.7962962962962963, "acc_norm_stderr": 0.03893542518824847 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7668711656441718, "acc_stderr": 0.0332201579577674, "acc_norm": 0.7668711656441718, "acc_norm_stderr": 0.0332201579577674 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5357142857142857, "acc_stderr": 0.04733667890053756, "acc_norm": 0.5357142857142857, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.8349514563106796, "acc_stderr": 0.03675668832233188, "acc_norm": 0.8349514563106796, "acc_norm_stderr": 0.03675668832233188 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406943, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406943 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8250319284802043, "acc_stderr": 0.013586619219903335, "acc_norm": 0.8250319284802043, "acc_norm_stderr": 0.013586619219903335 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7456647398843931, "acc_stderr": 0.023445826276545543, "acc_norm": 0.7456647398843931, "acc_norm_stderr": 0.023445826276545543 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2335195530726257, "acc_stderr": 0.014149575348976266, "acc_norm": 0.2335195530726257, "acc_norm_stderr": 0.014149575348976266 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7581699346405228, "acc_stderr": 0.024518195641879334, "acc_norm": 0.7581699346405228, "acc_norm_stderr": 0.024518195641879334 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.02575586592263294, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.02575586592263294 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7623456790123457, "acc_stderr": 0.023683591837008553, "acc_norm": 0.7623456790123457, "acc_norm_stderr": 0.023683591837008553 
}, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5212765957446809, "acc_stderr": 0.029800481645628693, "acc_norm": 0.5212765957446809, "acc_norm_stderr": 0.029800481645628693 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.500651890482399, "acc_stderr": 0.01277022525225556, "acc_norm": 0.500651890482399, "acc_norm_stderr": 0.01277022525225556 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7279411764705882, "acc_stderr": 0.027033041151681456, "acc_norm": 0.7279411764705882, "acc_norm_stderr": 0.027033041151681456 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6781045751633987, "acc_stderr": 0.018901015322093092, "acc_norm": 0.6781045751633987, "acc_norm_stderr": 0.018901015322093092 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7510204081632653, "acc_stderr": 0.027682979522960234, "acc_norm": 0.7510204081632653, "acc_norm_stderr": 0.027682979522960234 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8606965174129353, "acc_stderr": 0.024484487162913973, "acc_norm": 0.8606965174129353, "acc_norm_stderr": 0.024484487162913973 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.91, "acc_stderr": 0.028762349126466115, "acc_norm": 0.91, "acc_norm_stderr": 0.028762349126466115 }, "harness|hendrycksTest-virology|5": { "acc": 0.5783132530120482, "acc_stderr": 0.038444531817709175, "acc_norm": 0.5783132530120482, "acc_norm_stderr": 0.038444531817709175 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7894736842105263, "acc_stderr": 0.03126781714663179, "acc_norm": 0.7894736842105263, "acc_norm_stderr": 0.03126781714663179 }, "harness|truthfulqa:mc|0": { "mc1": 0.36964504283965727, "mc1_stderr": 0.016898180706973884, "mc2": 0.5283649819747338, "mc2_stderr": 0.015000610527158549 }, "harness|winogrande|5": { "acc": 0.829518547750592, "acc_stderr": 0.010569021122825888 }, "harness|gsm8k|5": { "acc": 0.5830174374526156, "acc_stderr": 0.013581320997216586 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_decapoda-research__Antares-11b-v1
[ "region:us" ]
2024-01-04T13:15:29+00:00
{"pretty_name": "Evaluation run of decapoda-research/Antares-11b-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [decapoda-research/Antares-11b-v1](https://huggingface.co/decapoda-research/Antares-11b-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_decapoda-research__Antares-11b-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:13:13.296577](https://huggingface.co/datasets/open-llm-leaderboard/details_decapoda-research__Antares-11b-v1/blob/main/results_2024-01-04T13-13-13.296577.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6602238370348961,\n \"acc_stderr\": 0.03146617343256451,\n \"acc_norm\": 0.6625896368336766,\n \"acc_norm_stderr\": 0.03209888448148018,\n \"mc1\": 0.36964504283965727,\n \"mc1_stderr\": 0.016898180706973884,\n \"mc2\": 0.5283649819747338,\n \"mc2_stderr\": 0.015000610527158549\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6083617747440273,\n \"acc_stderr\": 0.014264122124938215,\n \"acc_norm\": 0.6450511945392492,\n \"acc_norm_stderr\": 0.013983036904094089\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6535550687114121,\n \"acc_stderr\": 0.0047486451332815725,\n \"acc_norm\": 0.8485361481776539,\n \"acc_norm_stderr\": 0.0035776774950640926\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.562962962962963,\n \"acc_stderr\": 0.04284958639753401,\n \"acc_norm\": 0.562962962962963,\n \"acc_norm_stderr\": 0.04284958639753401\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7828947368421053,\n \"acc_stderr\": 0.03355045304882924,\n \"acc_norm\": 0.7828947368421053,\n \"acc_norm_stderr\": 0.03355045304882924\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6716981132075471,\n \"acc_stderr\": 0.02890159361241178,\n \"acc_norm\": 0.6716981132075471,\n \"acc_norm_stderr\": 0.02890159361241178\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.035868792800803406,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.035868792800803406\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6358381502890174,\n \"acc_stderr\": 0.03669072477416906,\n \"acc_norm\": 0.6358381502890174,\n \"acc_norm_stderr\": 0.03669072477416906\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.04755129616062946,\n \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.04755129616062946\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.625531914893617,\n \"acc_stderr\": 0.031639106653672915,\n \"acc_norm\": 0.625531914893617,\n \"acc_norm_stderr\": 0.031639106653672915\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370332,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370332\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.0256993528321318,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.0256993528321318\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8193548387096774,\n \"acc_stderr\": 0.021886178567172537,\n \"acc_norm\": 0.8193548387096774,\n \"acc_norm_stderr\": 0.021886178567172537\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.03192271569548301,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.03192271569548301\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8636363636363636,\n \"acc_stderr\": 0.024450155973189835,\n \"acc_norm\": 0.8636363636363636,\n \"acc_norm_stderr\": 0.024450155973189835\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9119170984455959,\n \"acc_stderr\": 0.02045374660160103,\n \"acc_norm\": 0.9119170984455959,\n \"acc_norm_stderr\": 0.02045374660160103\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.023946724741563976,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563976\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.337037037037037,\n \"acc_stderr\": 0.028820884666253252,\n \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.028820884666253252\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7016806722689075,\n \"acc_stderr\": 0.029719142876342856,\n \"acc_norm\": 0.7016806722689075,\n \"acc_norm_stderr\": 0.029719142876342856\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3576158940397351,\n \"acc_stderr\": 0.03913453431177258,\n \"acc_norm\": 0.3576158940397351,\n \"acc_norm_stderr\": 0.03913453431177258\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8568807339449541,\n \"acc_stderr\": 0.015014462497168583,\n \"acc_norm\": 0.8568807339449541,\n \"acc_norm_stderr\": 0.015014462497168583\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5509259259259259,\n \"acc_stderr\": 0.03392238405321617,\n \"acc_norm\": 0.5509259259259259,\n \"acc_norm_stderr\": 0.03392238405321617\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8578431372549019,\n \"acc_stderr\": 0.024509803921568624,\n \"acc_norm\": 0.8578431372549019,\n \"acc_norm_stderr\": 0.024509803921568624\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8818565400843882,\n \"acc_stderr\": 0.021011052659878463,\n \"acc_norm\": 0.8818565400843882,\n \"acc_norm_stderr\": 0.021011052659878463\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7174887892376681,\n \"acc_stderr\": 0.03021683101150877,\n \"acc_norm\": 0.7174887892376681,\n \"acc_norm_stderr\": 0.03021683101150877\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596914,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596914\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7962962962962963,\n \"acc_stderr\": 0.03893542518824847,\n \"acc_norm\": 0.7962962962962963,\n \"acc_norm_stderr\": 0.03893542518824847\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7668711656441718,\n \"acc_stderr\": 0.0332201579577674,\n \"acc_norm\": 0.7668711656441718,\n \"acc_norm_stderr\": 0.0332201579577674\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5357142857142857,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.5357142857142857,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8349514563106796,\n \"acc_stderr\": 0.03675668832233188,\n \"acc_norm\": 0.8349514563106796,\n \"acc_norm_stderr\": 0.03675668832233188\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406943,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406943\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8250319284802043,\n \"acc_stderr\": 0.013586619219903335,\n \"acc_norm\": 0.8250319284802043,\n \"acc_norm_stderr\": 0.013586619219903335\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7456647398843931,\n \"acc_stderr\": 0.023445826276545543,\n \"acc_norm\": 0.7456647398843931,\n \"acc_norm_stderr\": 0.023445826276545543\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2335195530726257,\n \"acc_stderr\": 0.014149575348976266,\n \"acc_norm\": 0.2335195530726257,\n \"acc_norm_stderr\": 0.014149575348976266\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7581699346405228,\n \"acc_stderr\": 0.024518195641879334,\n \"acc_norm\": 0.7581699346405228,\n \"acc_norm_stderr\": 0.024518195641879334\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.02575586592263294,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.02575586592263294\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7623456790123457,\n \"acc_stderr\": 0.023683591837008553,\n \"acc_norm\": 0.7623456790123457,\n \"acc_norm_stderr\": 0.023683591837008553\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5212765957446809,\n \"acc_stderr\": 0.029800481645628693,\n \"acc_norm\": 0.5212765957446809,\n \"acc_norm_stderr\": 0.029800481645628693\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.500651890482399,\n \"acc_stderr\": 0.01277022525225556,\n \"acc_norm\": 0.500651890482399,\n \"acc_norm_stderr\": 0.01277022525225556\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7279411764705882,\n \"acc_stderr\": 0.027033041151681456,\n \"acc_norm\": 0.7279411764705882,\n \"acc_norm_stderr\": 0.027033041151681456\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6781045751633987,\n \"acc_stderr\": 0.018901015322093092,\n \"acc_norm\": 0.6781045751633987,\n \"acc_norm_stderr\": 0.018901015322093092\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7510204081632653,\n \"acc_stderr\": 0.027682979522960234,\n \"acc_norm\": 0.7510204081632653,\n \"acc_norm_stderr\": 0.027682979522960234\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8606965174129353,\n \"acc_stderr\": 0.024484487162913973,\n \"acc_norm\": 0.8606965174129353,\n \"acc_norm_stderr\": 0.024484487162913973\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.028762349126466115,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.028762349126466115\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5783132530120482,\n \"acc_stderr\": 0.038444531817709175,\n \"acc_norm\": 0.5783132530120482,\n \"acc_norm_stderr\": 0.038444531817709175\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7894736842105263,\n \"acc_stderr\": 0.03126781714663179,\n \"acc_norm\": 0.7894736842105263,\n \"acc_norm_stderr\": 0.03126781714663179\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.36964504283965727,\n \"mc1_stderr\": 0.016898180706973884,\n \"mc2\": 0.5283649819747338,\n \"mc2_stderr\": 0.015000610527158549\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.829518547750592,\n \"acc_stderr\": 0.010569021122825888\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5830174374526156,\n \"acc_stderr\": 
0.013581320997216586\n }\n}\n```", "repo_url": "https://huggingface.co/decapoda-research/Antares-11b-v1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-13-13.296577.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-13-13.296577.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-13-13.296577.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-13-13.296577.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-13-13.296577.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_13_13.296577", "path": ["**/details_harness|winogrande|5_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T13-13-13.296577.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T13_13_13.296577", "path": ["results_2024-01-04T13-13-13.296577.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T13-13-13.296577.parquet"]}]}]}
2024-01-04T13:15:52+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of decapoda-research/Antares-11b-v1 Dataset automatically created during the evaluation run of model decapoda-research/Antares-11b-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T13:13:13.296577(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of decapoda-research/Antares-11b-v1\n\n\n\nDataset automatically created during the evaluation run of model decapoda-research/Antares-11b-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:13:13.296577(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of decapoda-research/Antares-11b-v1\n\n\n\nDataset automatically created during the evaluation run of model decapoda-research/Antares-11b-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:13:13.296577(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of decapoda-research/Antares-11b-v1\n\n\n\nDataset automatically created during the evaluation run of model decapoda-research/Antares-11b-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:13:13.296577(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
31d680845e3acf6063ad3a0d97d5bb60a16d467d
# Dataset Card for Evaluation run of NECOUDBFM/Jellyfish <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [NECOUDBFM/Jellyfish](https://huggingface.co/NECOUDBFM/Jellyfish) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_NECOUDBFM__Jellyfish", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:13:32.015225](https://huggingface.co/datasets/open-llm-leaderboard/details_NECOUDBFM__Jellyfish/blob/main/results_2024-01-04T13-13-32.015225.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5838417406503037, "acc_stderr": 0.03349211822062263, "acc_norm": 0.5910129283787261, "acc_norm_stderr": 0.034201347391937635, "mc1": 0.3806609547123623, "mc1_stderr": 0.016997627871907926, "mc2": 0.5331776960352129, "mc2_stderr": 0.015573496563932199 }, "harness|arc:challenge|25": { "acc": 0.5836177474402731, "acc_stderr": 0.014405618279436169, "acc_norm": 0.6331058020477816, "acc_norm_stderr": 0.014084133118104296 }, "harness|hellaswag|10": { "acc": 0.6309500099581756, "acc_stderr": 0.0048156131443854045, "acc_norm": 0.8319059948217487, "acc_norm_stderr": 0.003731854957030938 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.4888888888888889, "acc_stderr": 0.04318275491977976, "acc_norm": 0.4888888888888889, "acc_norm_stderr": 0.04318275491977976 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.631578947368421, "acc_stderr": 0.03925523381052932, "acc_norm": 0.631578947368421, "acc_norm_stderr": 0.03925523381052932 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6452830188679245, "acc_stderr": 0.029445175328199593, "acc_norm": 0.6452830188679245, "acc_norm_stderr": 0.029445175328199593 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6527777777777778, "acc_stderr": 0.03981240543717861, "acc_norm": 0.6527777777777778, "acc_norm_stderr": 0.03981240543717861 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.04960449637488584, "acc_norm": 0.42, "acc_norm_stderr": 0.04960449637488584 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5144508670520231, "acc_stderr": 0.03810871630454764, "acc_norm": 0.5144508670520231, "acc_norm_stderr": 0.03810871630454764 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.04810840148082636, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082636 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.64, "acc_stderr": 0.04824181513244218, "acc_norm": 0.64, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4851063829787234, "acc_stderr": 0.032671518489247764, "acc_norm": 0.4851063829787234, "acc_norm_stderr": 0.032671518489247764 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.34210526315789475, "acc_stderr": 0.04462917535336937, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.04462917535336937 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4896551724137931, "acc_stderr": 0.04165774775728763, "acc_norm": 0.4896551724137931, "acc_norm_stderr": 0.04165774775728763 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.024278568024307695, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.024278568024307695 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6548387096774193, "acc_stderr": 0.027045746573534323, "acc_norm": 0.6548387096774193, "acc_norm_stderr": 0.027045746573534323 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.45320197044334976, "acc_stderr": 0.03502544650845872, "acc_norm": 0.45320197044334976, "acc_norm_stderr": 0.03502544650845872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7272727272727273, "acc_stderr": 0.0347769116216366, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.0347769116216366 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7424242424242424, "acc_stderr": 0.031156269519646836, "acc_norm": 0.7424242424242424, "acc_norm_stderr": 0.031156269519646836 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8549222797927462, "acc_stderr": 0.025416343096306426, "acc_norm": 0.8549222797927462, "acc_norm_stderr": 0.025416343096306426 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6333333333333333, "acc_stderr": 0.02443301646605246, "acc_norm": 0.6333333333333333, "acc_norm_stderr": 0.02443301646605246 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.34074074074074073, "acc_stderr": 0.02889774874113114, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.02889774874113114 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6302521008403361, "acc_stderr": 0.031357095996135904, "acc_norm": 0.6302521008403361, "acc_norm_stderr": 0.031357095996135904 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 
0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7981651376146789, "acc_stderr": 0.017208579357787596, "acc_norm": 0.7981651376146789, "acc_norm_stderr": 0.017208579357787596 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8235294117647058, "acc_stderr": 0.026756401538078962, "acc_norm": 0.8235294117647058, "acc_norm_stderr": 0.026756401538078962 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7637130801687764, "acc_stderr": 0.02765215314415927, "acc_norm": 0.7637130801687764, "acc_norm_stderr": 0.02765215314415927 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6457399103139013, "acc_stderr": 0.032100621541349864, "acc_norm": 0.6457399103139013, "acc_norm_stderr": 0.032100621541349864 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6641221374045801, "acc_stderr": 0.041423137719966634, "acc_norm": 0.6641221374045801, "acc_norm_stderr": 0.041423137719966634 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7024793388429752, "acc_stderr": 0.04173349148083499, "acc_norm": 0.7024793388429752, "acc_norm_stderr": 0.04173349148083499 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.75, "acc_stderr": 0.04186091791394607, "acc_norm": 0.75, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7055214723926381, "acc_stderr": 0.03581165790474082, "acc_norm": 0.7055214723926381, "acc_norm_stderr": 0.03581165790474082 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.35714285714285715, "acc_stderr": 0.04547960999764376, "acc_norm": 0.35714285714285715, "acc_norm_stderr": 0.04547960999764376 }, "harness|hendrycksTest-management|5": { "acc": 0.7281553398058253, "acc_stderr": 0.044052680241409216, "acc_norm": 0.7281553398058253, "acc_norm_stderr": 0.044052680241409216 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8247863247863247, "acc_stderr": 0.02490443909891823, "acc_norm": 0.8247863247863247, "acc_norm_stderr": 0.02490443909891823 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7611749680715197, "acc_stderr": 0.015246803197398691, "acc_norm": 0.7611749680715197, "acc_norm_stderr": 0.015246803197398691 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6589595375722543, "acc_stderr": 0.025522474632121612, "acc_norm": 0.6589595375722543, "acc_norm_stderr": 0.025522474632121612 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4960893854748603, "acc_stderr": 0.016721990073156657, "acc_norm": 0.4960893854748603, "acc_norm_stderr": 0.016721990073156657 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6241830065359477, "acc_stderr": 0.027732834353363947, "acc_norm": 0.6241830065359477, "acc_norm_stderr": 0.027732834353363947 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6881028938906752, "acc_stderr": 0.02631185807185416, "acc_norm": 0.6881028938906752, "acc_norm_stderr": 0.02631185807185416 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7222222222222222, "acc_stderr": 0.024922001168886335, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.024922001168886335 }, "harness|hendrycksTest-professional_accounting|5": { 
"acc": 0.46099290780141844, "acc_stderr": 0.029736592526424438, "acc_norm": 0.46099290780141844, "acc_norm_stderr": 0.029736592526424438 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4589308996088657, "acc_stderr": 0.012727084826799802, "acc_norm": 0.4589308996088657, "acc_norm_stderr": 0.012727084826799802 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5919117647058824, "acc_stderr": 0.029855261393483924, "acc_norm": 0.5919117647058824, "acc_norm_stderr": 0.029855261393483924 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6062091503267973, "acc_stderr": 0.019766211991073063, "acc_norm": 0.6062091503267973, "acc_norm_stderr": 0.019766211991073063 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6363636363636364, "acc_stderr": 0.046075820907199756, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.046075820907199756 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6204081632653061, "acc_stderr": 0.031067211262872475, "acc_norm": 0.6204081632653061, "acc_norm_stderr": 0.031067211262872475 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7213930348258707, "acc_stderr": 0.031700561834973086, "acc_norm": 0.7213930348258707, "acc_norm_stderr": 0.031700561834973086 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.81, "acc_stderr": 0.03942772444036625, "acc_norm": 0.81, "acc_norm_stderr": 0.03942772444036625 }, "harness|hendrycksTest-virology|5": { "acc": 0.45180722891566266, "acc_stderr": 0.03874371556587953, "acc_norm": 0.45180722891566266, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7660818713450293, "acc_stderr": 0.03246721765117826, "acc_norm": 0.7660818713450293, "acc_norm_stderr": 0.03246721765117826 }, "harness|truthfulqa:mc|0": { "mc1": 0.3806609547123623, "mc1_stderr": 0.016997627871907926, "mc2": 0.5331776960352129, "mc2_stderr": 0.015573496563932199 }, "harness|winogrande|5": { "acc": 0.7584846093133386, "acc_stderr": 0.012028983782011874 }, "harness|gsm8k|5": { "acc": 0.23654283548142532, "acc_stderr": 0.01170548820296166 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
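The card above belongs to the per-run details repository whose identifier follows; as a minimal, hedged sketch (assuming the standard `datasets` API and the config names listed in the repo metadata below), the aggregated results and a single task's details could be pulled like this:

```python
from datasets import load_dataset

# Aggregated metrics for the most recent evaluation run
# (config "results", split "latest", per the repo metadata).
results = load_dataset(
    "open-llm-leaderboard/details_NECOUDBFM__Jellyfish",
    "results",
    split="latest",
)

# Per-task details follow the same pattern, e.g. 5-shot GSM8K.
gsm8k = load_dataset(
    "open-llm-leaderboard/details_NECOUDBFM__Jellyfish",
    "harness_gsm8k_5",
    split="latest",
)
```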
open-llm-leaderboard/details_NECOUDBFM__Jellyfish
[ "region:us" ]
2024-01-04T13:15:49+00:00
{"pretty_name": "Evaluation run of NECOUDBFM/Jellyfish", "dataset_summary": "Dataset automatically created during the evaluation run of model [NECOUDBFM/Jellyfish](https://huggingface.co/NECOUDBFM/Jellyfish) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NECOUDBFM__Jellyfish\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:13:32.015225](https://huggingface.co/datasets/open-llm-leaderboard/details_NECOUDBFM__Jellyfish/blob/main/results_2024-01-04T13-13-32.015225.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5838417406503037,\n \"acc_stderr\": 0.03349211822062263,\n \"acc_norm\": 0.5910129283787261,\n \"acc_norm_stderr\": 0.034201347391937635,\n \"mc1\": 0.3806609547123623,\n \"mc1_stderr\": 0.016997627871907926,\n \"mc2\": 0.5331776960352129,\n \"mc2_stderr\": 0.015573496563932199\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5836177474402731,\n \"acc_stderr\": 0.014405618279436169,\n \"acc_norm\": 0.6331058020477816,\n \"acc_norm_stderr\": 0.014084133118104296\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6309500099581756,\n \"acc_stderr\": 0.0048156131443854045,\n \"acc_norm\": 0.8319059948217487,\n \"acc_norm_stderr\": 0.003731854957030938\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.4888888888888889,\n \"acc_stderr\": 0.04318275491977976,\n \"acc_norm\": 0.4888888888888889,\n \"acc_norm_stderr\": 0.04318275491977976\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.631578947368421,\n \"acc_stderr\": 0.03925523381052932,\n \"acc_norm\": 0.631578947368421,\n \"acc_norm_stderr\": 0.03925523381052932\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6452830188679245,\n \"acc_stderr\": 0.029445175328199593,\n \"acc_norm\": 0.6452830188679245,\n \"acc_norm_stderr\": 0.029445175328199593\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6527777777777778,\n \"acc_stderr\": 0.03981240543717861,\n \"acc_norm\": 0.6527777777777778,\n \"acc_norm_stderr\": 0.03981240543717861\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.04960449637488584,\n \"acc_norm\": 
0.42,\n \"acc_norm_stderr\": 0.04960449637488584\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5144508670520231,\n \"acc_stderr\": 0.03810871630454764,\n \"acc_norm\": 0.5144508670520231,\n \"acc_norm_stderr\": 0.03810871630454764\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082636,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082636\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.64,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.64,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4851063829787234,\n \"acc_stderr\": 0.032671518489247764,\n \"acc_norm\": 0.4851063829787234,\n \"acc_norm_stderr\": 0.032671518489247764\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.34210526315789475,\n \"acc_stderr\": 0.04462917535336937,\n \"acc_norm\": 0.34210526315789475,\n \"acc_norm_stderr\": 0.04462917535336937\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.4896551724137931,\n \"acc_stderr\": 0.04165774775728763,\n \"acc_norm\": 0.4896551724137931,\n \"acc_norm_stderr\": 0.04165774775728763\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.024278568024307695,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.024278568024307695\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6548387096774193,\n \"acc_stderr\": 0.027045746573534323,\n \"acc_norm\": 0.6548387096774193,\n \"acc_norm_stderr\": 0.027045746573534323\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.45320197044334976,\n \"acc_stderr\": 0.03502544650845872,\n \"acc_norm\": 0.45320197044334976,\n \"acc_norm_stderr\": 0.03502544650845872\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.0347769116216366,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.0347769116216366\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7424242424242424,\n \"acc_stderr\": 0.031156269519646836,\n \"acc_norm\": 0.7424242424242424,\n \"acc_norm_stderr\": 0.031156269519646836\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8549222797927462,\n \"acc_stderr\": 0.025416343096306426,\n \"acc_norm\": 0.8549222797927462,\n \"acc_norm_stderr\": 0.025416343096306426\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6333333333333333,\n \"acc_stderr\": 0.02443301646605246,\n \"acc_norm\": 0.6333333333333333,\n \"acc_norm_stderr\": 0.02443301646605246\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.34074074074074073,\n \"acc_stderr\": 0.02889774874113114,\n \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.02889774874113114\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6302521008403361,\n \"acc_stderr\": 0.031357095996135904,\n \"acc_norm\": 0.6302521008403361,\n \"acc_norm_stderr\": 0.031357095996135904\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7981651376146789,\n \"acc_stderr\": 0.017208579357787596,\n \"acc_norm\": 0.7981651376146789,\n \"acc_norm_stderr\": 0.017208579357787596\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8235294117647058,\n \"acc_stderr\": 0.026756401538078962,\n \"acc_norm\": 0.8235294117647058,\n \"acc_norm_stderr\": 0.026756401538078962\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7637130801687764,\n \"acc_stderr\": 0.02765215314415927,\n \"acc_norm\": 0.7637130801687764,\n \"acc_norm_stderr\": 0.02765215314415927\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6457399103139013,\n \"acc_stderr\": 0.032100621541349864,\n \"acc_norm\": 0.6457399103139013,\n \"acc_norm_stderr\": 0.032100621541349864\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6641221374045801,\n \"acc_stderr\": 0.041423137719966634,\n \"acc_norm\": 0.6641221374045801,\n \"acc_norm_stderr\": 0.041423137719966634\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7024793388429752,\n \"acc_stderr\": 0.04173349148083499,\n \"acc_norm\": 0.7024793388429752,\n \"acc_norm_stderr\": 0.04173349148083499\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7055214723926381,\n \"acc_stderr\": 0.03581165790474082,\n \"acc_norm\": 0.7055214723926381,\n \"acc_norm_stderr\": 0.03581165790474082\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.35714285714285715,\n \"acc_stderr\": 0.04547960999764376,\n \"acc_norm\": 0.35714285714285715,\n \"acc_norm_stderr\": 0.04547960999764376\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7281553398058253,\n \"acc_stderr\": 0.044052680241409216,\n \"acc_norm\": 0.7281553398058253,\n \"acc_norm_stderr\": 0.044052680241409216\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8247863247863247,\n \"acc_stderr\": 0.02490443909891823,\n \"acc_norm\": 0.8247863247863247,\n \"acc_norm_stderr\": 0.02490443909891823\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7611749680715197,\n \"acc_stderr\": 0.015246803197398691,\n \"acc_norm\": 0.7611749680715197,\n 
\"acc_norm_stderr\": 0.015246803197398691\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6589595375722543,\n \"acc_stderr\": 0.025522474632121612,\n \"acc_norm\": 0.6589595375722543,\n \"acc_norm_stderr\": 0.025522474632121612\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4960893854748603,\n \"acc_stderr\": 0.016721990073156657,\n \"acc_norm\": 0.4960893854748603,\n \"acc_norm_stderr\": 0.016721990073156657\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6241830065359477,\n \"acc_stderr\": 0.027732834353363947,\n \"acc_norm\": 0.6241830065359477,\n \"acc_norm_stderr\": 0.027732834353363947\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6881028938906752,\n \"acc_stderr\": 0.02631185807185416,\n \"acc_norm\": 0.6881028938906752,\n \"acc_norm_stderr\": 0.02631185807185416\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.024922001168886335,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.024922001168886335\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46099290780141844,\n \"acc_stderr\": 0.029736592526424438,\n \"acc_norm\": 0.46099290780141844,\n \"acc_norm_stderr\": 0.029736592526424438\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4589308996088657,\n \"acc_stderr\": 0.012727084826799802,\n \"acc_norm\": 0.4589308996088657,\n \"acc_norm_stderr\": 0.012727084826799802\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5919117647058824,\n \"acc_stderr\": 0.029855261393483924,\n \"acc_norm\": 0.5919117647058824,\n \"acc_norm_stderr\": 0.029855261393483924\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6062091503267973,\n \"acc_stderr\": 0.019766211991073063,\n \"acc_norm\": 0.6062091503267973,\n \"acc_norm_stderr\": 0.019766211991073063\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n \"acc_stderr\": 0.046075820907199756,\n \"acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.046075820907199756\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6204081632653061,\n \"acc_stderr\": 0.031067211262872475,\n \"acc_norm\": 0.6204081632653061,\n \"acc_norm_stderr\": 0.031067211262872475\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7213930348258707,\n \"acc_stderr\": 0.031700561834973086,\n \"acc_norm\": 0.7213930348258707,\n \"acc_norm_stderr\": 0.031700561834973086\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.03942772444036625,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.03942772444036625\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.45180722891566266,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.45180722891566266,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7660818713450293,\n \"acc_stderr\": 0.03246721765117826,\n \"acc_norm\": 0.7660818713450293,\n \"acc_norm_stderr\": 0.03246721765117826\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3806609547123623,\n \"mc1_stderr\": 0.016997627871907926,\n \"mc2\": 0.5331776960352129,\n \"mc2_stderr\": 0.015573496563932199\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7584846093133386,\n \"acc_stderr\": 0.012028983782011874\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.23654283548142532,\n \"acc_stderr\": 0.01170548820296166\n }\n}\n```", "repo_url": "https://huggingface.co/NECOUDBFM/Jellyfish", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-13-32.015225.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-13-32.015225.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-13-32.015225.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-13-32.015225.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-13-32.015225.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-13-32.015225.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["**/details_harness|winogrande|5_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T13-13-32.015225.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T13_13_32.015225", "path": ["results_2024-01-04T13-13-32.015225.parquet"]}, {"split": "latest", "path": 
["results_2024-01-04T13-13-32.015225.parquet"]}]}]}
2024-01-04T13:16:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of NECOUDBFM/Jellyfish Dataset automatically created during the evaluation run of model NECOUDBFM/Jellyfish on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T13:13:32.015225 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
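The snippet referenced by "do the following" was stripped when this card text was flattened; the repo metadata above preserves it, and it amounts to:

```python
from datasets import load_dataset

# Load the 5-shot Winogrande details for the latest run of this model.
data = load_dataset(
    "open-llm-leaderboard/details_NECOUDBFM__Jellyfish",
    "harness_winogrande_5",
    split="train",
)
```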
[ "# Dataset Card for Evaluation run of NECOUDBFM/Jellyfish\n\n\n\nDataset automatically created during the evaluation run of model NECOUDBFM/Jellyfish on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:13:32.015225(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of NECOUDBFM/Jellyfish\n\n\n\nDataset automatically created during the evaluation run of model NECOUDBFM/Jellyfish on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:13:32.015225(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 179, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of NECOUDBFM/Jellyfish\n\n\n\nDataset automatically created during the evaluation run of model NECOUDBFM/Jellyfish on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:13:32.015225(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
da92a903b62e3df064d5df3c241bf491036453cc
# airoboros-3.1-Turkish ``` Dataset Cost: USD 100.8 Translated with: gpt-3.5-turbo-1106 Elapsed Time: 1 hour 23 minutes ``` ## Metrics: ``` English Token Count: 23.963.424 Token Count After Turkish Translation: 34.608.427 Number of Successfully Translated Rows: 59.277 ```
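Token totals like those above are typically computed with OpenAI's `tiktoken` tokenizer; the following is only a rough sketch, since the card does not say which columns or split were counted (both are assumptions here):

```python
import tiktoken
from datasets import load_dataset

enc = tiktoken.encoding_for_model("gpt-3.5-turbo")
ds = load_dataset("t3aile/airoboros-3.1-Turkish", split="train")  # split name assumed

# Serializing the whole row is a stand-in for whatever fields were actually counted.
total = sum(len(enc.encode(str(example))) for example in ds)
print(f"{total:,} tokens across {len(ds):,} rows")
```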
t3aile/airoboros-3.1-Turkish
[ "size_categories:10K<n<100K", "language:tr", "region:us" ]
2024-01-04T13:21:37+00:00
{"language": ["tr"], "size_categories": ["10K<n<100K"]}
2024-01-04T13:28:24+00:00
[]
[ "tr" ]
TAGS #size_categories-10K<n<100K #language-Turkish #region-us
# airoboros-3.1-Turkish ## Metrics:
[ "# airoboros-3.1-Turkish", "## Metrics:" ]
[ "TAGS\n#size_categories-10K<n<100K #language-Turkish #region-us \n", "# airoboros-3.1-Turkish", "## Metrics:" ]
[ 24, 10, 5 ]
[ "passage: TAGS\n#size_categories-10K<n<100K #language-Turkish #region-us \n# airoboros-3.1-Turkish## Metrics:" ]
414a948b8dbb6fcc43ed68b6dd227ce41038e0ad
# Dataset Card for Evaluation run of mwitiderrick/SwahiliInstruct-v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [mwitiderrick/SwahiliInstruct-v0.1](https://huggingface.co/mwitiderrick/SwahiliInstruct-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_mwitiderrick__SwahiliInstruct-v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:23:17.485650](https://huggingface.co/datasets/open-llm-leaderboard/details_mwitiderrick__SwahiliInstruct-v0.1/blob/main/results_2024-01-04T13-23-17.485650.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.568056530303725, "acc_stderr": 0.03389424659403196, "acc_norm": 0.5741618135624408, "acc_norm_stderr": 0.03461557207796032, "mc1": 0.412484700122399, "mc1_stderr": 0.01723329939957122, "mc2": 0.5807786148821394, "mc2_stderr": 0.015264526115121314 }, "harness|arc:challenge|25": { "acc": 0.5452218430034129, "acc_stderr": 0.01455150706083636, "acc_norm": 0.575938566552901, "acc_norm_stderr": 0.014441889627464398 }, "harness|hellaswag|10": { "acc": 0.6167098187612029, "acc_stderr": 0.0048519441706712605, "acc_norm": 0.8092013543118901, "acc_norm_stderr": 0.003921276446819986 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5111111111111111, "acc_stderr": 0.04318275491977976, "acc_norm": 0.5111111111111111, "acc_norm_stderr": 0.04318275491977976 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6052631578947368, "acc_stderr": 0.039777499346220734, "acc_norm": 0.6052631578947368, "acc_norm_stderr": 0.039777499346220734 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6226415094339622, "acc_stderr": 0.029832808114796005, "acc_norm": 0.6226415094339622, "acc_norm_stderr": 0.029832808114796005 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.625, "acc_stderr": 0.04048439222695598, "acc_norm": 0.625, "acc_norm_stderr": 0.04048439222695598 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 
0.050211673156867795 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5606936416184971, "acc_stderr": 0.037842719328874674, "acc_norm": 0.5606936416184971, "acc_norm_stderr": 0.037842719328874674 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.72, "acc_stderr": 0.04512608598542129, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542129 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.4808510638297872, "acc_stderr": 0.032662042990646775, "acc_norm": 0.4808510638297872, "acc_norm_stderr": 0.032662042990646775 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.39473684210526316, "acc_stderr": 0.045981880578165414, "acc_norm": 0.39473684210526316, "acc_norm_stderr": 0.045981880578165414 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5379310344827586, "acc_stderr": 0.041546596717075474, "acc_norm": 0.5379310344827586, "acc_norm_stderr": 0.041546596717075474 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.37566137566137564, "acc_stderr": 0.02494236893115978, "acc_norm": 0.37566137566137564, "acc_norm_stderr": 0.02494236893115978 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4126984126984127, "acc_stderr": 0.04403438954768176, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.04403438954768176 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6451612903225806, "acc_stderr": 0.027218889773308757, "acc_norm": 0.6451612903225806, "acc_norm_stderr": 0.027218889773308757 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4482758620689655, "acc_stderr": 0.03499113137676744, "acc_norm": 0.4482758620689655, "acc_norm_stderr": 0.03499113137676744 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.57, "acc_stderr": 0.049756985195624284, "acc_norm": 0.57, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7212121212121212, "acc_stderr": 0.03501438706296781, "acc_norm": 0.7212121212121212, "acc_norm_stderr": 0.03501438706296781 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7373737373737373, "acc_stderr": 0.03135305009533086, "acc_norm": 0.7373737373737373, "acc_norm_stderr": 0.03135305009533086 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.7979274611398963, "acc_stderr": 0.02897908979429673, "acc_norm": 0.7979274611398963, "acc_norm_stderr": 0.02897908979429673 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5487179487179488, "acc_stderr": 0.02523038123893484, "acc_norm": 0.5487179487179488, "acc_norm_stderr": 0.02523038123893484 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3296296296296296, "acc_stderr": 0.028661201116524565, "acc_norm": 0.3296296296296296, "acc_norm_stderr": 0.028661201116524565 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5966386554621849, "acc_stderr": 0.031866081214088314, "acc_norm": 0.5966386554621849, "acc_norm_stderr": 0.031866081214088314 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.3973509933774834, "acc_stderr": 0.03995524007681679, "acc_norm": 0.3973509933774834, "acc_norm_stderr": 0.03995524007681679 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7376146788990826, "acc_stderr": 0.01886188502153473, "acc_norm": 0.7376146788990826, "acc_norm_stderr": 0.01886188502153473 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.49537037037037035, "acc_stderr": 0.03409825519163572, "acc_norm": 0.49537037037037035, "acc_norm_stderr": 0.03409825519163572 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7009803921568627, "acc_stderr": 0.03213325717373617, "acc_norm": 0.7009803921568627, "acc_norm_stderr": 0.03213325717373617 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.729957805907173, "acc_stderr": 0.028900721906293433, "acc_norm": 0.729957805907173, "acc_norm_stderr": 0.028900721906293433 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6143497757847534, "acc_stderr": 0.03266842214289201, "acc_norm": 0.6143497757847534, "acc_norm_stderr": 0.03266842214289201 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6793893129770993, "acc_stderr": 0.04093329229834278, "acc_norm": 0.6793893129770993, "acc_norm_stderr": 0.04093329229834278 }, "harness|hendrycksTest-international_law|5": { "acc": 0.743801652892562, "acc_stderr": 0.03984979653302872, "acc_norm": 0.743801652892562, "acc_norm_stderr": 0.03984979653302872 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6666666666666666, "acc_stderr": 0.04557239513497752, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.04557239513497752 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6809815950920245, "acc_stderr": 0.03661997551073836, "acc_norm": 0.6809815950920245, "acc_norm_stderr": 0.03661997551073836 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.6893203883495146, "acc_stderr": 0.04582124160161549, "acc_norm": 0.6893203883495146, "acc_norm_stderr": 0.04582124160161549 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8461538461538461, "acc_stderr": 0.02363687331748928, "acc_norm": 0.8461538461538461, "acc_norm_stderr": 0.02363687331748928 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7458492975734355, "acc_stderr": 0.015569254692045764, "acc_norm": 0.7458492975734355, "acc_norm_stderr": 0.015569254692045764 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5838150289017341, "acc_stderr": 0.026538189104705477, "acc_norm": 0.5838150289017341, "acc_norm_stderr": 0.026538189104705477 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2581005586592179, "acc_stderr": 0.014635185616527819, "acc_norm": 0.2581005586592179, "acc_norm_stderr": 0.014635185616527819 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6111111111111112, "acc_stderr": 0.027914055510467998, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.027914055510467998 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6334405144694534, "acc_stderr": 0.027368078243971635, "acc_norm": 0.6334405144694534, "acc_norm_stderr": 0.027368078243971635 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6111111111111112, "acc_stderr": 0.027125115513166848, "acc_norm": 0.6111111111111112, "acc_norm_stderr": 0.027125115513166848 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.38652482269503546, "acc_stderr": 0.029049190342543454, "acc_norm": 0.38652482269503546, "acc_norm_stderr": 0.029049190342543454 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.40221642764015647, "acc_stderr": 0.012523646856180178, "acc_norm": 0.40221642764015647, "acc_norm_stderr": 0.012523646856180178 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5514705882352942, "acc_stderr": 0.030211479609121596, "acc_norm": 0.5514705882352942, "acc_norm_stderr": 0.030211479609121596 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.5392156862745098, "acc_stderr": 0.020165523313907908, "acc_norm": 0.5392156862745098, "acc_norm_stderr": 0.020165523313907908 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6612244897959184, "acc_stderr": 0.030299506562154185, "acc_norm": 0.6612244897959184, "acc_norm_stderr": 0.030299506562154185 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7114427860696517, "acc_stderr": 0.03203841040213321, "acc_norm": 0.7114427860696517, "acc_norm_stderr": 0.03203841040213321 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.78, "acc_stderr": 0.04163331998932263, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932263 }, "harness|hendrycksTest-virology|5": { "acc": 0.46987951807228917, "acc_stderr": 0.03885425420866767, "acc_norm": 0.46987951807228917, "acc_norm_stderr": 0.03885425420866767 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7602339181286549, "acc_stderr": 0.032744852119469564, "acc_norm": 0.7602339181286549, "acc_norm_stderr": 0.032744852119469564 }, "harness|truthfulqa:mc|0": { "mc1": 0.412484700122399, "mc1_stderr": 0.01723329939957122, "mc2": 0.5807786148821394, "mc2_stderr": 0.015264526115121314 }, "harness|winogrande|5": { "acc": 0.7466456195737964, "acc_stderr": 0.012223754434233621 }, "harness|gsm8k|5": { "acc": 0.25246398786959817, "acc_stderr": 0.011966250044833981 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
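As a complement to the per-task loading example above, this is a minimal sketch for pulling the aggregated scores via the `results` configuration and its `latest` split (names taken from this record's config listing; treat them as assumptions if the repository layout changes):

```python
from datasets import load_dataset

# Aggregated run results: the "results" config and "latest" split names are
# taken from the configs listed in this record's metadata.
results = load_dataset(
    "open-llm-leaderboard/details_mwitiderrick__SwahiliInstruct-v0.1",
    "results",
    split="latest",
)
print(results[0])
```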
open-llm-leaderboard/details_mwitiderrick__SwahiliInstruct-v0.1
[ "region:us" ]
2024-01-04T13:25:34+00:00
{"pretty_name": "Evaluation run of mwitiderrick/SwahiliInstruct-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [mwitiderrick/SwahiliInstruct-v0.1](https://huggingface.co/mwitiderrick/SwahiliInstruct-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_mwitiderrick__SwahiliInstruct-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:23:17.485650](https://huggingface.co/datasets/open-llm-leaderboard/details_mwitiderrick__SwahiliInstruct-v0.1/blob/main/results_2024-01-04T13-23-17.485650.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.568056530303725,\n \"acc_stderr\": 0.03389424659403196,\n \"acc_norm\": 0.5741618135624408,\n \"acc_norm_stderr\": 0.03461557207796032,\n \"mc1\": 0.412484700122399,\n \"mc1_stderr\": 0.01723329939957122,\n \"mc2\": 0.5807786148821394,\n \"mc2_stderr\": 0.015264526115121314\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5452218430034129,\n \"acc_stderr\": 0.01455150706083636,\n \"acc_norm\": 0.575938566552901,\n \"acc_norm_stderr\": 0.014441889627464398\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6167098187612029,\n \"acc_stderr\": 0.0048519441706712605,\n \"acc_norm\": 0.8092013543118901,\n \"acc_norm_stderr\": 0.003921276446819986\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5111111111111111,\n \"acc_stderr\": 0.04318275491977976,\n \"acc_norm\": 0.5111111111111111,\n \"acc_norm_stderr\": 0.04318275491977976\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6052631578947368,\n \"acc_stderr\": 0.039777499346220734,\n \"acc_norm\": 0.6052631578947368,\n \"acc_norm_stderr\": 0.039777499346220734\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6226415094339622,\n \"acc_stderr\": 0.029832808114796005,\n \"acc_norm\": 0.6226415094339622,\n \"acc_norm_stderr\": 0.029832808114796005\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.625,\n \"acc_stderr\": 0.04048439222695598,\n \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.04048439222695598\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.41,\n 
\"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5606936416184971,\n \"acc_stderr\": 0.037842719328874674,\n \"acc_norm\": 0.5606936416184971,\n \"acc_norm_stderr\": 0.037842719328874674\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542129,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542129\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.4808510638297872,\n \"acc_stderr\": 0.032662042990646775,\n \"acc_norm\": 0.4808510638297872,\n \"acc_norm_stderr\": 0.032662042990646775\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.39473684210526316,\n \"acc_stderr\": 0.045981880578165414,\n \"acc_norm\": 0.39473684210526316,\n \"acc_norm_stderr\": 0.045981880578165414\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5379310344827586,\n \"acc_stderr\": 0.041546596717075474,\n \"acc_norm\": 0.5379310344827586,\n \"acc_norm_stderr\": 0.041546596717075474\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.37566137566137564,\n \"acc_stderr\": 0.02494236893115978,\n \"acc_norm\": 0.37566137566137564,\n \"acc_norm_stderr\": 0.02494236893115978\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4126984126984127,\n \"acc_stderr\": 0.04403438954768176,\n \"acc_norm\": 0.4126984126984127,\n \"acc_norm_stderr\": 0.04403438954768176\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6451612903225806,\n \"acc_stderr\": 0.027218889773308757,\n \"acc_norm\": 0.6451612903225806,\n \"acc_norm_stderr\": 0.027218889773308757\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4482758620689655,\n \"acc_stderr\": 0.03499113137676744,\n \"acc_norm\": 0.4482758620689655,\n \"acc_norm_stderr\": 0.03499113137676744\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7212121212121212,\n \"acc_stderr\": 0.03501438706296781,\n \"acc_norm\": 0.7212121212121212,\n \"acc_norm_stderr\": 0.03501438706296781\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7373737373737373,\n \"acc_stderr\": 0.03135305009533086,\n \"acc_norm\": 0.7373737373737373,\n \"acc_norm_stderr\": 0.03135305009533086\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.7979274611398963,\n \"acc_stderr\": 0.02897908979429673,\n \"acc_norm\": 0.7979274611398963,\n \"acc_norm_stderr\": 0.02897908979429673\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5487179487179488,\n \"acc_stderr\": 0.02523038123893484,\n \"acc_norm\": 0.5487179487179488,\n \"acc_norm_stderr\": 0.02523038123893484\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3296296296296296,\n \"acc_stderr\": 0.028661201116524565,\n \"acc_norm\": 0.3296296296296296,\n \"acc_norm_stderr\": 0.028661201116524565\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5966386554621849,\n \"acc_stderr\": 0.031866081214088314,\n \"acc_norm\": 0.5966386554621849,\n \"acc_norm_stderr\": 0.031866081214088314\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3973509933774834,\n \"acc_stderr\": 0.03995524007681679,\n \"acc_norm\": 0.3973509933774834,\n \"acc_norm_stderr\": 0.03995524007681679\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7376146788990826,\n \"acc_stderr\": 0.01886188502153473,\n \"acc_norm\": 0.7376146788990826,\n \"acc_norm_stderr\": 0.01886188502153473\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.49537037037037035,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.49537037037037035,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7009803921568627,\n \"acc_stderr\": 0.03213325717373617,\n \"acc_norm\": 0.7009803921568627,\n \"acc_norm_stderr\": 0.03213325717373617\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.729957805907173,\n \"acc_stderr\": 0.028900721906293433,\n \"acc_norm\": 0.729957805907173,\n \"acc_norm_stderr\": 0.028900721906293433\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6143497757847534,\n \"acc_stderr\": 0.03266842214289201,\n \"acc_norm\": 0.6143497757847534,\n \"acc_norm_stderr\": 0.03266842214289201\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6793893129770993,\n \"acc_stderr\": 0.04093329229834278,\n \"acc_norm\": 0.6793893129770993,\n \"acc_norm_stderr\": 0.04093329229834278\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.743801652892562,\n \"acc_stderr\": 0.03984979653302872,\n \"acc_norm\": 0.743801652892562,\n \"acc_norm_stderr\": 0.03984979653302872\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.04557239513497752,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.04557239513497752\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6809815950920245,\n \"acc_stderr\": 0.03661997551073836,\n \"acc_norm\": 0.6809815950920245,\n \"acc_norm_stderr\": 0.03661997551073836\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6893203883495146,\n \"acc_stderr\": 0.04582124160161549,\n \"acc_norm\": 0.6893203883495146,\n \"acc_norm_stderr\": 0.04582124160161549\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8461538461538461,\n \"acc_stderr\": 0.02363687331748928,\n \"acc_norm\": 0.8461538461538461,\n \"acc_norm_stderr\": 0.02363687331748928\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7458492975734355,\n \"acc_stderr\": 0.015569254692045764,\n \"acc_norm\": 0.7458492975734355,\n \"acc_norm_stderr\": 0.015569254692045764\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5838150289017341,\n \"acc_stderr\": 0.026538189104705477,\n \"acc_norm\": 0.5838150289017341,\n \"acc_norm_stderr\": 0.026538189104705477\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2581005586592179,\n \"acc_stderr\": 0.014635185616527819,\n \"acc_norm\": 0.2581005586592179,\n \"acc_norm_stderr\": 0.014635185616527819\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.027914055510467998,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.027914055510467998\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6334405144694534,\n \"acc_stderr\": 0.027368078243971635,\n \"acc_norm\": 0.6334405144694534,\n \"acc_norm_stderr\": 0.027368078243971635\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6111111111111112,\n \"acc_stderr\": 0.027125115513166848,\n \"acc_norm\": 0.6111111111111112,\n \"acc_norm_stderr\": 0.027125115513166848\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.38652482269503546,\n \"acc_stderr\": 0.029049190342543454,\n \"acc_norm\": 0.38652482269503546,\n \"acc_norm_stderr\": 0.029049190342543454\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.40221642764015647,\n \"acc_stderr\": 0.012523646856180178,\n \"acc_norm\": 0.40221642764015647,\n \"acc_norm_stderr\": 0.012523646856180178\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5514705882352942,\n \"acc_stderr\": 0.030211479609121596,\n \"acc_norm\": 0.5514705882352942,\n \"acc_norm_stderr\": 0.030211479609121596\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.5392156862745098,\n \"acc_stderr\": 0.020165523313907908,\n \"acc_norm\": 0.5392156862745098,\n \"acc_norm_stderr\": 0.020165523313907908\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6612244897959184,\n \"acc_stderr\": 0.030299506562154185,\n \"acc_norm\": 0.6612244897959184,\n \"acc_norm_stderr\": 0.030299506562154185\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7114427860696517,\n \"acc_stderr\": 0.03203841040213321,\n \"acc_norm\": 0.7114427860696517,\n \"acc_norm_stderr\": 0.03203841040213321\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932263,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932263\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.46987951807228917,\n \"acc_stderr\": 0.03885425420866767,\n \"acc_norm\": 0.46987951807228917,\n \"acc_norm_stderr\": 0.03885425420866767\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7602339181286549,\n \"acc_stderr\": 0.032744852119469564,\n \"acc_norm\": 0.7602339181286549,\n \"acc_norm_stderr\": 0.032744852119469564\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.412484700122399,\n \"mc1_stderr\": 0.01723329939957122,\n \"mc2\": 0.5807786148821394,\n \"mc2_stderr\": 0.015264526115121314\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7466456195737964,\n \"acc_stderr\": 0.012223754434233621\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.25246398786959817,\n \"acc_stderr\": 
0.011966250044833981\n }\n}\n```", "repo_url": "https://huggingface.co/mwitiderrick/SwahiliInstruct-v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-23-17.485650.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-23-17.485650.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-23-17.485650.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-23-17.485650.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-23-17.485650.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_23_17.485650", "path": ["**/details_harness|winogrande|5_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T13-23-17.485650.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T13_23_17.485650", "path": ["results_2024-01-04T13-23-17.485650.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T13-23-17.485650.parquet"]}]}]}
2024-01-04T13:25:59+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of mwitiderrick/SwahiliInstruct-v0.1 Dataset automatically created during the evaluation run of model mwitiderrick/SwahiliInstruct-v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T13:23:17.485650 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
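For example, the per-task details can be loaded with the `datasets` library. A minimal sketch, assuming the detail repository follows the leaderboard's usual `open-llm-leaderboard/details_<org>__<model>` naming (the exact repo id is an inference, not stated in this rendering) and using the `harness_winogrande_5` configuration and `latest` split listed in the metadata above:

```python
from datasets import load_dataset

# Repo id inferred from the leaderboard naming pattern (assumption); verify before use.
data = load_dataset(
    "open-llm-leaderboard/details_mwitiderrick__SwahiliInstruct-v0.1",
    "harness_winogrande_5",  # one of the 63 per-task configurations
    split="latest",          # or the timestamped split "2024_01_04T13_23_17.485650"
)
print(data)
```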
[ "# Dataset Card for Evaluation run of mwitiderrick/SwahiliInstruct-v0.1\n\n\n\nDataset automatically created during the evaluation run of model mwitiderrick/SwahiliInstruct-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:23:17.485650(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of mwitiderrick/SwahiliInstruct-v0.1\n\n\n\nDataset automatically created during the evaluation run of model mwitiderrick/SwahiliInstruct-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:23:17.485650(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of mwitiderrick/SwahiliInstruct-v0.1\n\n\n\nDataset automatically created during the evaluation run of model mwitiderrick/SwahiliInstruct-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:23:17.485650(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
d415a6c634273a13bbc3881affe70a6af253e9b9
<img src="https://huggingface.co/datasets/nyanko7/danbooru2023/resolve/main/cover.webp" alt="cover" width="750"/> # Danbooru2023: A Large-Scale Crowdsourced and Tagged Anime Illustration Dataset <!-- Provide a quick summary of the dataset. --> Danbooru2023 is a large-scale anime image dataset with over 5 million images contributed and annotated in detail by an enthusiast community. Image tags cover aspects like characters, scenes, copyrights, artists, etc with an average of 30 tags per image. Danbooru is a veteran anime image board with high-quality images and extensive tag metadata. The dataset can be used to train image classification, multi-label tagging, character detection, generative models, and other computer vision tasks. - **Shared by:** Nyanko Devs - **Language(s):** English, Japanese - **License:** MIT This dataset is built on the top of [danbooru2021](https://gwern.net/danbooru2021). We expands the dataset to include images up to ID #6,857,737, adding over 1.8 million additional images and total size is now approximately 8 terabytes (8,000 GB). ## Use ## Format The goal of the dataset is to be as easy as possible to use immediately, avoiding obscure file formats, while allowing simultaneous research & seeding of the torrent, with easy updates. Images are provided in the full original form (be that JPG, PNG, GIF or otherwise) for reference/archival purposes, and bucketed into 1000 subdirectories 0000–0999 (0-padded), which is the Danbooru ID modulo 1000 (ie. all images in 0999/ have an ID ending in ‘999’); IDs can be turned into paths by dividing & padding (eg. in Bash, BUCKET=$(printf "%04d" $(( ID % 1000 )) )) and then the file is at {original,512px}/$BUCKET/$ID.$EXT. The reason for the bucketing is that a single directory would cause pathological filesystem performance, and modulo ID is a simple hash which spreads images evenly without requiring additional future directories to be made or a filesystem IO to check where the file is. The ID is not zero-padded and files end in the relevant extension, hence the file layout looks like this: ```bash $ tree / | less / ├── danbooru2023 -> /mnt/diffusionstorage/workspace/danbooru/ │ ├── metadata │ ├── readme.md │ ├── original │ │ ├── 0000 -> data-0000.tar │ │ ├── 0001 -> data-0001.tar │ │ │ ├── 10001.jpg │ │ │ ├── 210001.png │ │ │ ├── 3120001.webp │ │ │ ├── 6513001.jpg ``` Currently represented file extensions are: avi/bmp/gif/html/jpeg/jpg/mp3/mp4/mpg/pdf/png/rar/swf/webm/wmv/zip. Raw original files are treacherous. Be careful if working with the original dataset. There are many odd files: truncated, non-sRGB colorspace, wrong file extensions (eg. some PNGs have .jpg extensions like original/0146/1525146.jpg or original/0558/1422558.jpg), etc.
nyanko7/danbooru2023
[ "task_categories:image-classification", "task_categories:image-to-image", "task_categories:text-to-image", "size_categories:1M<n<10M", "language:en", "language:ja", "license:mit", "region:us" ]
2024-01-04T13:28:13+00:00
{"language": ["en", "ja"], "license": "mit", "size_categories": ["1M<n<10M"], "task_categories": ["image-classification", "image-to-image", "text-to-image"], "pretty_name": "danbooru2023", "viewer": false}
2024-01-29T06:26:20+00:00
[]
[ "en", "ja" ]
TAGS #task_categories-image-classification #task_categories-image-to-image #task_categories-text-to-image #size_categories-1M<n<10M #language-English #language-Japanese #license-mit #region-us
<img src="URL alt="cover" width="750"/> # Danbooru2023: A Large-Scale Crowdsourced and Tagged Anime Illustration Dataset Danbooru2023 is a large-scale anime image dataset with over 5 million images contributed and annotated in detail by an enthusiast community. Image tags cover aspects like characters, scenes, copyrights, artists, etc with an average of 30 tags per image. Danbooru is a veteran anime image board with high-quality images and extensive tag metadata. The dataset can be used to train image classification, multi-label tagging, character detection, generative models, and other computer vision tasks. - Shared by: Nyanko Devs - Language(s): English, Japanese - License: MIT This dataset is built on the top of danbooru2021. We expands the dataset to include images up to ID #6,857,737, adding over 1.8 million additional images and total size is now approximately 8 terabytes (8,000 GB). ## Use ## Format The goal of the dataset is to be as easy as possible to use immediately, avoiding obscure file formats, while allowing simultaneous research & seeding of the torrent, with easy updates. Images are provided in the full original form (be that JPG, PNG, GIF or otherwise) for reference/archival purposes, and bucketed into 1000 subdirectories 0000–0999 (0-padded), which is the Danbooru ID modulo 1000 (ie. all images in 0999/ have an ID ending in ‘999’); IDs can be turned into paths by dividing & padding (eg. in Bash, BUCKET=$(printf "%04d" $(( ID % 1000 )) )) and then the file is at {original,512px}/$BUCKET/$ID.$EXT. The reason for the bucketing is that a single directory would cause pathological filesystem performance, and modulo ID is a simple hash which spreads images evenly without requiring additional future directories to be made or a filesystem IO to check where the file is. The ID is not zero-padded and files end in the relevant extension, hence the file layout looks like this: Currently represented file extensions are: avi/bmp/gif/html/jpeg/jpg/mp3/mp4/mpg/pdf/png/rar/swf/webm/wmv/zip. Raw original files are treacherous. Be careful if working with the original dataset. There are many odd files: truncated, non-sRGB colorspace, wrong file extensions (eg. some PNGs have .jpg extensions like original/0146/URL or original/0558/URL), etc.
[ "# Danbooru2023: A Large-Scale Crowdsourced and Tagged Anime Illustration Dataset\n\n\n\nDanbooru2023 is a large-scale anime image dataset with over 5 million images contributed and annotated in detail by an enthusiast community. Image tags cover aspects like characters, scenes, copyrights, artists, etc with an average of 30 tags per image.\n\nDanbooru is a veteran anime image board with high-quality images and extensive tag metadata. The dataset can be used to train image classification, multi-label tagging, character detection, generative models, and other computer vision tasks.\n\n- Shared by: Nyanko Devs\n- Language(s): English, Japanese\n- License: MIT\n\nThis dataset is built on the top of danbooru2021. We expands the dataset to include images up to ID #6,857,737, adding over 1.8 million additional images and total size is now approximately 8 terabytes (8,000 GB).", "## Use", "## Format\n\nThe goal of the dataset is to be as easy as possible to use immediately, avoiding obscure file formats, while allowing simultaneous research & seeding of the torrent, with easy updates.\n\nImages are provided in the full original form (be that JPG, PNG, GIF or otherwise) for reference/archival purposes, and bucketed into 1000 subdirectories 0000–0999 (0-padded), which is the Danbooru ID modulo 1000 (ie. all images in 0999/ have an ID ending in ‘999’); IDs can be turned into paths by dividing & padding (eg. in Bash, BUCKET=$(printf \"%04d\" $(( ID % 1000 )) )) and then the file is at {original,512px}/$BUCKET/$ID.$EXT. \n\nThe reason for the bucketing is that a single directory would cause pathological filesystem performance, and modulo ID is a simple hash which spreads images evenly without requiring additional future directories to be made or a filesystem IO to check where the file is. The ID is not zero-padded and files end in the relevant extension, hence the file layout looks like this:\n\n\n \nCurrently represented file extensions are: avi/bmp/gif/html/jpeg/jpg/mp3/mp4/mpg/pdf/png/rar/swf/webm/wmv/zip. \n\nRaw original files are treacherous. Be careful if working with the original dataset. There are many odd files: truncated, non-sRGB colorspace, wrong file extensions (eg. some PNGs have .jpg extensions like original/0146/URL or original/0558/URL), etc." ]
[ "TAGS\n#task_categories-image-classification #task_categories-image-to-image #task_categories-text-to-image #size_categories-1M<n<10M #language-English #language-Japanese #license-mit #region-us \n", "# Danbooru2023: A Large-Scale Crowdsourced and Tagged Anime Illustration Dataset\n\n\n\nDanbooru2023 is a large-scale anime image dataset with over 5 million images contributed and annotated in detail by an enthusiast community. Image tags cover aspects like characters, scenes, copyrights, artists, etc with an average of 30 tags per image.\n\nDanbooru is a veteran anime image board with high-quality images and extensive tag metadata. The dataset can be used to train image classification, multi-label tagging, character detection, generative models, and other computer vision tasks.\n\n- Shared by: Nyanko Devs\n- Language(s): English, Japanese\n- License: MIT\n\nThis dataset is built on the top of danbooru2021. We expands the dataset to include images up to ID #6,857,737, adding over 1.8 million additional images and total size is now approximately 8 terabytes (8,000 GB).", "## Use", "## Format\n\nThe goal of the dataset is to be as easy as possible to use immediately, avoiding obscure file formats, while allowing simultaneous research & seeding of the torrent, with easy updates.\n\nImages are provided in the full original form (be that JPG, PNG, GIF or otherwise) for reference/archival purposes, and bucketed into 1000 subdirectories 0000–0999 (0-padded), which is the Danbooru ID modulo 1000 (ie. all images in 0999/ have an ID ending in ‘999’); IDs can be turned into paths by dividing & padding (eg. in Bash, BUCKET=$(printf \"%04d\" $(( ID % 1000 )) )) and then the file is at {original,512px}/$BUCKET/$ID.$EXT. \n\nThe reason for the bucketing is that a single directory would cause pathological filesystem performance, and modulo ID is a simple hash which spreads images evenly without requiring additional future directories to be made or a filesystem IO to check where the file is. The ID is not zero-padded and files end in the relevant extension, hence the file layout looks like this:\n\n\n \nCurrently represented file extensions are: avi/bmp/gif/html/jpeg/jpg/mp3/mp4/mpg/pdf/png/rar/swf/webm/wmv/zip. \n\nRaw original files are treacherous. Be careful if working with the original dataset. There are many odd files: truncated, non-sRGB colorspace, wrong file extensions (eg. some PNGs have .jpg extensions like original/0146/URL or original/0558/URL), etc." ]
[ 68, 213, 2, 391 ]
[ "passage: TAGS\n#task_categories-image-classification #task_categories-image-to-image #task_categories-text-to-image #size_categories-1M<n<10M #language-English #language-Japanese #license-mit #region-us \n# Danbooru2023: A Large-Scale Crowdsourced and Tagged Anime Illustration Dataset\n\n\n\nDanbooru2023 is a large-scale anime image dataset with over 5 million images contributed and annotated in detail by an enthusiast community. Image tags cover aspects like characters, scenes, copyrights, artists, etc with an average of 30 tags per image.\n\nDanbooru is a veteran anime image board with high-quality images and extensive tag metadata. The dataset can be used to train image classification, multi-label tagging, character detection, generative models, and other computer vision tasks.\n\n- Shared by: Nyanko Devs\n- Language(s): English, Japanese\n- License: MIT\n\nThis dataset is built on the top of danbooru2021. We expands the dataset to include images up to ID #6,857,737, adding over 1.8 million additional images and total size is now approximately 8 terabytes (8,000 GB).## Use" ]
1df15ae29a947b7e1d30b65f1027848cf87060f5
# Dataset Card for Evaluation run of HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K](https://huggingface.co/HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_HenryJJ__Instruct_Mistral-7B-v0.1_Dolly15K", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:27:32.660899](https://huggingface.co/datasets/open-llm-leaderboard/details_HenryJJ__Instruct_Mistral-7B-v0.1_Dolly15K/blob/main/results_2024-01-04T13-27-32.660899.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6241143484289186, "acc_stderr": 0.032689663124831826, "acc_norm": 0.6299031400315822, "acc_norm_stderr": 0.033361474961048916, "mc1": 0.2802937576499388, "mc1_stderr": 0.015723139524608767, "mc2": 0.435601924823795, "mc2_stderr": 0.014179199089974604 }, "harness|arc:challenge|25": { "acc": 0.5571672354948806, "acc_stderr": 0.014515573873348906, "acc_norm": 0.5938566552901023, "acc_norm_stderr": 0.014351656690097862 }, "harness|hellaswag|10": { "acc": 0.6253734315873332, "acc_stderr": 0.004830371317841054, "acc_norm": 0.826229834694284, "acc_norm_stderr": 0.00378137335887 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6513157894736842, "acc_stderr": 0.03878139888797611, "acc_norm": 0.6513157894736842, "acc_norm_stderr": 0.03878139888797611 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.660377358490566, "acc_stderr": 0.029146904747798328, "acc_norm": 0.660377358490566, "acc_norm_stderr": 0.029146904747798328 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7291666666666666, "acc_stderr": 0.03716177437566017, "acc_norm": 0.7291666666666666, "acc_norm_stderr": 0.03716177437566017 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620332, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620332 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 
0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5838150289017341, "acc_stderr": 0.03758517775404947, "acc_norm": 0.5838150289017341, "acc_norm_stderr": 0.03758517775404947 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.35294117647058826, "acc_stderr": 0.04755129616062946, "acc_norm": 0.35294117647058826, "acc_norm_stderr": 0.04755129616062946 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5574468085106383, "acc_stderr": 0.032469569197899575, "acc_norm": 0.5574468085106383, "acc_norm_stderr": 0.032469569197899575 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.041227371113703316, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.041227371113703316 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3994708994708995, "acc_stderr": 0.02522545028406788, "acc_norm": 0.3994708994708995, "acc_norm_stderr": 0.02522545028406788 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.3968253968253968, "acc_stderr": 0.04375888492727061, "acc_norm": 0.3968253968253968, "acc_norm_stderr": 0.04375888492727061 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7483870967741936, "acc_stderr": 0.024685979286239956, "acc_norm": 0.7483870967741936, "acc_norm_stderr": 0.024685979286239956 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5221674876847291, "acc_stderr": 0.03514528562175008, "acc_norm": 0.5221674876847291, "acc_norm_stderr": 0.03514528562175008 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.04725815626252607, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252607 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7636363636363637, "acc_stderr": 0.03317505930009182, "acc_norm": 0.7636363636363637, "acc_norm_stderr": 0.03317505930009182 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7525252525252525, "acc_stderr": 0.030746300742124498, "acc_norm": 0.7525252525252525, "acc_norm_stderr": 0.030746300742124498 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.844559585492228, "acc_stderr": 0.026148483469153314, "acc_norm": 0.844559585492228, "acc_norm_stderr": 0.026148483469153314 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6205128205128205, "acc_stderr": 0.024603626924097417, "acc_norm": 0.6205128205128205, "acc_norm_stderr": 0.024603626924097417 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.337037037037037, "acc_stderr": 0.028820884666253252, "acc_norm": 0.337037037037037, "acc_norm_stderr": 0.028820884666253252 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6260504201680672, "acc_stderr": 0.031429466378837076, "acc_norm": 0.6260504201680672, "acc_norm_stderr": 0.031429466378837076 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.33774834437086093, "acc_stderr": 0.03861557546255169, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.03861557546255169 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7944954128440367, "acc_stderr": 0.01732435232501601, "acc_norm": 0.7944954128440367, "acc_norm_stderr": 0.01732435232501601 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5046296296296297, "acc_stderr": 0.03409825519163572, "acc_norm": 0.5046296296296297, "acc_norm_stderr": 0.03409825519163572 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8137254901960784, "acc_stderr": 0.027325470966716312, "acc_norm": 0.8137254901960784, "acc_norm_stderr": 0.027325470966716312 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7763713080168776, "acc_stderr": 0.027123298205229966, "acc_norm": 0.7763713080168776, "acc_norm_stderr": 0.027123298205229966 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.031146796482972465, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.031146796482972465 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7557251908396947, "acc_stderr": 0.037683359597287434, "acc_norm": 0.7557251908396947, "acc_norm_stderr": 0.037683359597287434 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.75, "acc_stderr": 0.04186091791394607, "acc_norm": 0.75, "acc_norm_stderr": 0.04186091791394607 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7791411042944786, "acc_stderr": 0.03259177392742178, "acc_norm": 0.7791411042944786, "acc_norm_stderr": 0.03259177392742178 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.41964285714285715, "acc_stderr": 0.04684099321077106, "acc_norm": 0.41964285714285715, "acc_norm_stderr": 0.04684099321077106 }, "harness|hendrycksTest-management|5": { "acc": 0.7961165048543689, "acc_stderr": 0.039891398595317706, "acc_norm": 0.7961165048543689, "acc_norm_stderr": 0.039891398595317706 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.022801382534597528, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.022801382534597528 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8135376756066411, "acc_stderr": 0.013927751372001501, "acc_norm": 0.8135376756066411, "acc_norm_stderr": 0.013927751372001501 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6994219653179191, "acc_stderr": 0.0246853168672578, "acc_norm": 0.6994219653179191, "acc_norm_stderr": 0.0246853168672578 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4033519553072626, "acc_stderr": 0.01640712303219525, "acc_norm": 0.4033519553072626, "acc_norm_stderr": 0.01640712303219525 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7320261437908496, "acc_stderr": 0.02536060379624255, "acc_norm": 0.7320261437908496, "acc_norm_stderr": 0.02536060379624255 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7009646302250804, "acc_stderr": 0.02600330111788514, "acc_norm": 0.7009646302250804, "acc_norm_stderr": 0.02600330111788514 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7067901234567902, "acc_stderr": 0.025329888171900926, "acc_norm": 0.7067901234567902, "acc_norm_stderr": 0.025329888171900926 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.49645390070921985, "acc_stderr": 0.02982674915328092, "acc_norm": 0.49645390070921985, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.44784876140808344, "acc_stderr": 0.01270058240476822, "acc_norm": 0.44784876140808344, "acc_norm_stderr": 0.01270058240476822 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6397058823529411, "acc_stderr": 0.029163128570670733, "acc_norm": 0.6397058823529411, "acc_norm_stderr": 0.029163128570670733 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6666666666666666, "acc_stderr": 0.019070985589687495, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.019070985589687495 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7020408163265306, "acc_stderr": 0.029279567411065677, "acc_norm": 0.7020408163265306, "acc_norm_stderr": 0.029279567411065677 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7960199004975125, "acc_stderr": 0.02849317624532607, "acc_norm": 0.7960199004975125, "acc_norm_stderr": 0.02849317624532607 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774709, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774709 }, "harness|hendrycksTest-virology|5": { "acc": 0.5542168674698795, "acc_stderr": 0.03869543323472101, "acc_norm": 0.5542168674698795, "acc_norm_stderr": 0.03869543323472101 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8011695906432749, "acc_stderr": 0.03061111655743253, "acc_norm": 0.8011695906432749, "acc_norm_stderr": 0.03061111655743253 }, "harness|truthfulqa:mc|0": { "mc1": 0.2802937576499388, "mc1_stderr": 0.015723139524608767, "mc2": 0.435601924823795, "mc2_stderr": 0.014179199089974604 }, "harness|winogrande|5": { "acc": 0.7932123125493291, "acc_stderr": 0.011382566829235805 }, "harness|gsm8k|5": { "acc": 0.3510235026535254, "acc_stderr": 0.01314694594139722 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
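Besides the per-task configurations, the aggregated numbers shown under "Latest results" can also be retrieved programmatically. A minimal sketch, assuming the "results" configuration exposes the same "latest" split naming as the per-task configurations (this mirrors the pattern in the metadata rather than a documented guarantee):

```python
from datasets import load_dataset

# "results" holds the aggregated metrics for the run; the split name is assumed to
# follow the same "latest"/timestamped convention as the task configurations.
results = load_dataset(
    "open-llm-leaderboard/details_HenryJJ__Instruct_Mistral-7B-v0.1_Dolly15K",
    "results",
    split="latest",
)
print(results[0])
```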
open-llm-leaderboard/details_HenryJJ__Instruct_Mistral-7B-v0.1_Dolly15K
[ "region:us" ]
2024-01-04T13:29:53+00:00
{"pretty_name": "Evaluation run of HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K", "dataset_summary": "Dataset automatically created during the evaluation run of model [HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K](https://huggingface.co/HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_HenryJJ__Instruct_Mistral-7B-v0.1_Dolly15K\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:27:32.660899](https://huggingface.co/datasets/open-llm-leaderboard/details_HenryJJ__Instruct_Mistral-7B-v0.1_Dolly15K/blob/main/results_2024-01-04T13-27-32.660899.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6241143484289186,\n \"acc_stderr\": 0.032689663124831826,\n \"acc_norm\": 0.6299031400315822,\n \"acc_norm_stderr\": 0.033361474961048916,\n \"mc1\": 0.2802937576499388,\n \"mc1_stderr\": 0.015723139524608767,\n \"mc2\": 0.435601924823795,\n \"mc2_stderr\": 0.014179199089974604\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5571672354948806,\n \"acc_stderr\": 0.014515573873348906,\n \"acc_norm\": 0.5938566552901023,\n \"acc_norm_stderr\": 0.014351656690097862\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6253734315873332,\n \"acc_stderr\": 0.004830371317841054,\n \"acc_norm\": 0.826229834694284,\n \"acc_norm_stderr\": 0.00378137335887\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6513157894736842,\n \"acc_stderr\": 0.03878139888797611,\n \"acc_norm\": 0.6513157894736842,\n \"acc_norm_stderr\": 0.03878139888797611\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.660377358490566,\n \"acc_stderr\": 0.029146904747798328,\n \"acc_norm\": 0.660377358490566,\n \"acc_norm_stderr\": 0.029146904747798328\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7291666666666666,\n \"acc_stderr\": 0.03716177437566017,\n \"acc_norm\": 0.7291666666666666,\n \"acc_norm_stderr\": 0.03716177437566017\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620332,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620332\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5838150289017341,\n \"acc_stderr\": 0.03758517775404947,\n \"acc_norm\": 0.5838150289017341,\n \"acc_norm_stderr\": 0.03758517775404947\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.35294117647058826,\n \"acc_stderr\": 0.04755129616062946,\n \"acc_norm\": 0.35294117647058826,\n \"acc_norm_stderr\": 0.04755129616062946\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5574468085106383,\n \"acc_stderr\": 0.032469569197899575,\n \"acc_norm\": 0.5574468085106383,\n \"acc_norm_stderr\": 0.032469569197899575\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.041227371113703316,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.041227371113703316\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3994708994708995,\n \"acc_stderr\": 0.02522545028406788,\n \"acc_norm\": 0.3994708994708995,\n \"acc_norm_stderr\": 0.02522545028406788\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.3968253968253968,\n \"acc_stderr\": 0.04375888492727061,\n \"acc_norm\": 0.3968253968253968,\n \"acc_norm_stderr\": 0.04375888492727061\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7483870967741936,\n \"acc_stderr\": 0.024685979286239956,\n \"acc_norm\": 0.7483870967741936,\n \"acc_norm_stderr\": 0.024685979286239956\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5221674876847291,\n \"acc_stderr\": 0.03514528562175008,\n \"acc_norm\": 0.5221674876847291,\n \"acc_norm_stderr\": 0.03514528562175008\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252607,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252607\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.03317505930009182,\n \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.03317505930009182\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7525252525252525,\n \"acc_stderr\": 0.030746300742124498,\n \"acc_norm\": 0.7525252525252525,\n \"acc_norm_stderr\": 0.030746300742124498\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.844559585492228,\n \"acc_stderr\": 0.026148483469153314,\n \"acc_norm\": 0.844559585492228,\n \"acc_norm_stderr\": 0.026148483469153314\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6205128205128205,\n \"acc_stderr\": 0.024603626924097417,\n \"acc_norm\": 0.6205128205128205,\n \"acc_norm_stderr\": 0.024603626924097417\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.337037037037037,\n \"acc_stderr\": 0.028820884666253252,\n \"acc_norm\": 0.337037037037037,\n \"acc_norm_stderr\": 0.028820884666253252\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6260504201680672,\n \"acc_stderr\": 0.031429466378837076,\n \"acc_norm\": 0.6260504201680672,\n \"acc_norm_stderr\": 0.031429466378837076\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7944954128440367,\n \"acc_stderr\": 0.01732435232501601,\n \"acc_norm\": 0.7944954128440367,\n \"acc_norm_stderr\": 0.01732435232501601\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5046296296296297,\n \"acc_stderr\": 0.03409825519163572,\n \"acc_norm\": 0.5046296296296297,\n \"acc_norm_stderr\": 0.03409825519163572\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8137254901960784,\n \"acc_stderr\": 0.027325470966716312,\n \"acc_norm\": 0.8137254901960784,\n \"acc_norm_stderr\": 0.027325470966716312\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7763713080168776,\n \"acc_stderr\": 0.027123298205229966,\n \"acc_norm\": 0.7763713080168776,\n \"acc_norm_stderr\": 0.027123298205229966\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.031146796482972465,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.031146796482972465\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7557251908396947,\n \"acc_stderr\": 0.037683359597287434,\n \"acc_norm\": 0.7557251908396947,\n \"acc_norm_stderr\": 0.037683359597287434\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04186091791394607,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04186091791394607\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7791411042944786,\n \"acc_stderr\": 0.03259177392742178,\n \"acc_norm\": 0.7791411042944786,\n \"acc_norm_stderr\": 0.03259177392742178\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.41964285714285715,\n \"acc_stderr\": 0.04684099321077106,\n \"acc_norm\": 0.41964285714285715,\n \"acc_norm_stderr\": 0.04684099321077106\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7961165048543689,\n \"acc_stderr\": 0.039891398595317706,\n \"acc_norm\": 0.7961165048543689,\n \"acc_norm_stderr\": 0.039891398595317706\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597528,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597528\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8135376756066411,\n 
\"acc_stderr\": 0.013927751372001501,\n \"acc_norm\": 0.8135376756066411,\n \"acc_norm_stderr\": 0.013927751372001501\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6994219653179191,\n \"acc_stderr\": 0.0246853168672578,\n \"acc_norm\": 0.6994219653179191,\n \"acc_norm_stderr\": 0.0246853168672578\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4033519553072626,\n \"acc_stderr\": 0.01640712303219525,\n \"acc_norm\": 0.4033519553072626,\n \"acc_norm_stderr\": 0.01640712303219525\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7320261437908496,\n \"acc_stderr\": 0.02536060379624255,\n \"acc_norm\": 0.7320261437908496,\n \"acc_norm_stderr\": 0.02536060379624255\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7009646302250804,\n \"acc_stderr\": 0.02600330111788514,\n \"acc_norm\": 0.7009646302250804,\n \"acc_norm_stderr\": 0.02600330111788514\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7067901234567902,\n \"acc_stderr\": 0.025329888171900926,\n \"acc_norm\": 0.7067901234567902,\n \"acc_norm_stderr\": 0.025329888171900926\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.49645390070921985,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.49645390070921985,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.44784876140808344,\n \"acc_stderr\": 0.01270058240476822,\n \"acc_norm\": 0.44784876140808344,\n \"acc_norm_stderr\": 0.01270058240476822\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6397058823529411,\n \"acc_stderr\": 0.029163128570670733,\n \"acc_norm\": 0.6397058823529411,\n \"acc_norm_stderr\": 0.029163128570670733\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.019070985589687495,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.019070985589687495\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7020408163265306,\n \"acc_stderr\": 0.029279567411065677,\n \"acc_norm\": 0.7020408163265306,\n \"acc_norm_stderr\": 0.029279567411065677\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7960199004975125,\n \"acc_stderr\": 0.02849317624532607,\n \"acc_norm\": 0.7960199004975125,\n \"acc_norm_stderr\": 0.02849317624532607\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774709,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774709\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5542168674698795,\n \"acc_stderr\": 0.03869543323472101,\n \"acc_norm\": 0.5542168674698795,\n \"acc_norm_stderr\": 0.03869543323472101\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8011695906432749,\n \"acc_stderr\": 0.03061111655743253,\n \"acc_norm\": 0.8011695906432749,\n \"acc_norm_stderr\": 0.03061111655743253\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2802937576499388,\n \"mc1_stderr\": 0.015723139524608767,\n \"mc2\": 0.435601924823795,\n \"mc2_stderr\": 0.014179199089974604\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7932123125493291,\n \"acc_stderr\": 0.011382566829235805\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3510235026535254,\n \"acc_stderr\": 0.01314694594139722\n }\n}\n```", "repo_url": 
"https://huggingface.co/HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-27-32.660899.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-27-32.660899.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-27-32.660899.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-27-32.660899.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-27-32.660899.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_27_32.660899", "path": ["**/details_harness|winogrande|5_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T13-27-32.660899.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T13_27_32.660899", "path": ["results_2024-01-04T13-27-32.660899.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T13-27-32.660899.parquet"]}]}]}
2024-01-04T13:30:17+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K Dataset automatically created during the evaluation run of model HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T13:27:32.660899 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
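The loading snippet referenced by "do the following" above is dropped in this plain-text rendering of the card. A minimal sketch of the call it refers to is given below; the repository id is an assumption based on the leaderboard's usual `details_<org>__<model>` naming, and `harness_winogrande_5` is one of the configurations listed in this record's metadata.

```python
from datasets import load_dataset

# Hypothetical repository id, assuming the leaderboard's details_<org>__<model> convention.
REPO = "open-llm-leaderboard/details_HenryJJ__Instruct_Mistral-7B-v0.1_Dolly15K"

# Per-sample details for one task configuration; per the card text, the "train"
# split points at the most recent evaluation run.
data = load_dataset(REPO, "harness_winogrande_5", split="train")
print(data)
```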
[ "# Dataset Card for Evaluation run of HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K\n\n\n\nDataset automatically created during the evaluation run of model HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:27:32.660899(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K\n\n\n\nDataset automatically created during the evaluation run of model HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:27:32.660899(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 197, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K\n\n\n\nDataset automatically created during the evaluation run of model HenryJJ/Instruct_Mistral-7B-v0.1_Dolly15K on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:27:32.660899(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
a1d491d5760b8df2af9532f4353632742fcde1ed
# Dataset Card for Evaluation run of fblgit/UNA-POLAR-10.7B-InstructMath-v2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [fblgit/UNA-POLAR-10.7B-InstructMath-v2](https://huggingface.co/fblgit/UNA-POLAR-10.7B-InstructMath-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_fblgit__UNA-POLAR-10.7B-InstructMath-v2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:31:30.327640](https://huggingface.co/datasets/open-llm-leaderboard/details_fblgit__UNA-POLAR-10.7B-InstructMath-v2/blob/main/results_2024-01-04T13-31-30.327640.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6640742249502881, "acc_stderr": 0.03170094021991354, "acc_norm": 0.6648977588200776, "acc_norm_stderr": 0.03234593494804173, "mc1": 0.572827417380661, "mc1_stderr": 0.017316834410963926, "mc2": 0.717276829204571, "mc2_stderr": 0.0151125842350684 }, "harness|arc:challenge|25": { "acc": 0.6825938566552902, "acc_stderr": 0.013602239088038167, "acc_norm": 0.7073378839590444, "acc_norm_stderr": 0.013295916103619429 }, "harness|hellaswag|10": { "acc": 0.7093208524198367, "acc_stderr": 0.004531477407589653, "acc_norm": 0.8819956184027086, "acc_norm_stderr": 0.0032195397905004737 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6, "acc_stderr": 0.042320736951515885, "acc_norm": 0.6, "acc_norm_stderr": 0.042320736951515885 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7368421052631579, "acc_stderr": 0.03583496176361072, "acc_norm": 0.7368421052631579, "acc_norm_stderr": 0.03583496176361072 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6943396226415094, "acc_stderr": 0.028353298073322666, "acc_norm": 0.6943396226415094, "acc_norm_stderr": 0.028353298073322666 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr":
0.050251890762960605 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.653179190751445, "acc_stderr": 0.036291466701596636, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.036291466701596636 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6170212765957447, "acc_stderr": 0.03177821250236922, "acc_norm": 0.6170212765957447, "acc_norm_stderr": 0.03177821250236922 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6068965517241379, "acc_stderr": 0.040703290137070705, "acc_norm": 0.6068965517241379, "acc_norm_stderr": 0.040703290137070705 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4708994708994709, "acc_stderr": 0.025707658614154964, "acc_norm": 0.4708994708994709, "acc_norm_stderr": 0.025707658614154964 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42063492063492064, "acc_stderr": 0.04415438226743744, "acc_norm": 0.42063492063492064, "acc_norm_stderr": 0.04415438226743744 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.04793724854411019, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411019 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8129032258064516, "acc_stderr": 0.022185710092252252, "acc_norm": 0.8129032258064516, "acc_norm_stderr": 0.022185710092252252 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8121212121212121, "acc_stderr": 0.03050193405942914, "acc_norm": 0.8121212121212121, "acc_norm_stderr": 0.03050193405942914 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8636363636363636, "acc_stderr": 0.024450155973189835, "acc_norm": 0.8636363636363636, "acc_norm_stderr": 0.024450155973189835 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8911917098445595, "acc_stderr": 0.022473253332768763, "acc_norm": 0.8911917098445595, "acc_norm_stderr": 0.022473253332768763 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6564102564102564, "acc_stderr": 0.024078696580635474, "acc_norm": 0.6564102564102564, "acc_norm_stderr": 0.024078696580635474 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3592592592592593, "acc_stderr": 0.029252905927251972, "acc_norm": 0.3592592592592593, "acc_norm_stderr": 0.029252905927251972 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7184873949579832, "acc_stderr": 0.02921354941437217, "acc_norm": 0.7184873949579832, "acc_norm_stderr": 0.02921354941437217 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.39072847682119205, 
"acc_stderr": 0.039837983066598075, "acc_norm": 0.39072847682119205, "acc_norm_stderr": 0.039837983066598075 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8495412844036697, "acc_stderr": 0.015328563932669235, "acc_norm": 0.8495412844036697, "acc_norm_stderr": 0.015328563932669235 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5833333333333334, "acc_stderr": 0.033622774366080424, "acc_norm": 0.5833333333333334, "acc_norm_stderr": 0.033622774366080424 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8529411764705882, "acc_stderr": 0.02485747808025046, "acc_norm": 0.8529411764705882, "acc_norm_stderr": 0.02485747808025046 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8438818565400844, "acc_stderr": 0.023627159460318667, "acc_norm": 0.8438818565400844, "acc_norm_stderr": 0.023627159460318667 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7251908396946565, "acc_stderr": 0.03915345408847836, "acc_norm": 0.7251908396946565, "acc_norm_stderr": 0.03915345408847836 }, "harness|hendrycksTest-international_law|5": { "acc": 0.768595041322314, "acc_stderr": 0.03849856098794088, "acc_norm": 0.768595041322314, "acc_norm_stderr": 0.03849856098794088 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8240740740740741, "acc_stderr": 0.03680918141673881, "acc_norm": 0.8240740740740741, "acc_norm_stderr": 0.03680918141673881 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.03351953879521269, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.03351953879521269 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4375, "acc_stderr": 0.04708567521880525, "acc_norm": 0.4375, "acc_norm_stderr": 0.04708567521880525 }, "harness|hendrycksTest-management|5": { "acc": 0.8252427184466019, "acc_stderr": 0.0376017800602662, "acc_norm": 0.8252427184466019, "acc_norm_stderr": 0.0376017800602662 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8717948717948718, "acc_stderr": 0.02190190511507333, "acc_norm": 0.8717948717948718, "acc_norm_stderr": 0.02190190511507333 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8084291187739464, "acc_stderr": 0.01407285931045195, "acc_norm": 0.8084291187739464, "acc_norm_stderr": 0.01407285931045195 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7485549132947977, "acc_stderr": 0.023357365785874037, "acc_norm": 0.7485549132947977, "acc_norm_stderr": 0.023357365785874037 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3843575418994413, "acc_stderr": 0.016269088663959402, "acc_norm": 0.3843575418994413, "acc_norm_stderr": 0.016269088663959402 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.761437908496732, "acc_stderr": 0.02440439492808787, "acc_norm": 0.761437908496732, "acc_norm_stderr": 0.02440439492808787 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7234726688102894, "acc_stderr": 0.02540383297817961, "acc_norm": 0.7234726688102894, "acc_norm_stderr": 0.02540383297817961 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7839506172839507, "acc_stderr": 0.022899162918445806, "acc_norm": 0.7839506172839507, "acc_norm_stderr": 0.022899162918445806 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.5070921985815603, "acc_stderr": 0.02982449855912901, "acc_norm": 0.5070921985815603, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.485006518904824, "acc_stderr": 0.012764493202193255, "acc_norm": 0.485006518904824, "acc_norm_stderr": 0.012764493202193255 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7463235294117647, "acc_stderr": 0.026431329870789527, "acc_norm": 0.7463235294117647, "acc_norm_stderr": 0.026431329870789527 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6862745098039216, "acc_stderr": 0.01877168389352817, "acc_norm": 0.6862745098039216, "acc_norm_stderr": 0.01877168389352817 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7551020408163265, "acc_stderr": 0.027529637440174927, "acc_norm": 0.7551020408163265, "acc_norm_stderr": 0.027529637440174927 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8308457711442786, "acc_stderr": 0.02650859065623327, "acc_norm": 0.8308457711442786, "acc_norm_stderr": 0.02650859065623327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352203, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352203 }, "harness|hendrycksTest-virology|5": { "acc": 0.5783132530120482, "acc_stderr": 0.038444531817709175, "acc_norm": 0.5783132530120482, "acc_norm_stderr": 0.038444531817709175 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.783625730994152, "acc_stderr": 0.03158149539338733, "acc_norm": 0.783625730994152, "acc_norm_stderr": 0.03158149539338733 }, "harness|truthfulqa:mc|0": { "mc1": 0.572827417380661, "mc1_stderr": 0.017316834410963926, "mc2": 0.717276829204571, "mc2_stderr": 0.0151125842350684 }, "harness|winogrande|5": { "acc": 0.829518547750592, "acc_stderr": 0.010569021122825897 }, "harness|gsm8k|5": { "acc": 0.6474601971190296, "acc_stderr": 0.013159909755930333 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
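Beyond the single `harness_winogrande_5` example shown in the card, the text explains that each task has its own configuration, that every run is stored as a timestamped split with a `latest` split pointing at the newest one, and that a `results` configuration aggregates the run-level metrics. A minimal sketch of how those pieces might be loaded is given below; the per-task configuration name is an assumption, following the `harness_<task>_<n_shot>` pattern used by sibling leaderboard detail repositories.

```python
from datasets import load_dataset

REPO = "open-llm-leaderboard/details_fblgit__UNA-POLAR-10.7B-InstructMath-v2"

# Aggregated metrics for the whole run; "latest" is assumed to mirror the
# newest timestamped split, as described in the card.
results = load_dataset(REPO, "results", split="latest")

# Per-sample details for one task; "harness_arc_challenge_25" is an assumed
# config name following the leaderboard's harness_<task>_<n_shot> pattern.
arc_details = load_dataset(REPO, "harness_arc_challenge_25", split="latest")

print(results[0])      # one row of aggregated scores for the run
print(arc_details[0])  # one evaluated ARC example with its prediction
```

Keeping per-example records in task-specific configurations while exposing a separate `results` configuration is what lets the leaderboard recompute and display aggregate metrics without re-reading every detail file.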
open-llm-leaderboard/details_fblgit__UNA-POLAR-10.7B-InstructMath-v2
[ "region:us" ]
2024-01-04T13:33:52+00:00
{"pretty_name": "Evaluation run of fblgit/UNA-POLAR-10.7B-InstructMath-v2", "dataset_summary": "Dataset automatically created during the evaluation run of model [fblgit/UNA-POLAR-10.7B-InstructMath-v2](https://huggingface.co/fblgit/UNA-POLAR-10.7B-InstructMath-v2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_fblgit__UNA-POLAR-10.7B-InstructMath-v2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:31:30.327640](https://huggingface.co/datasets/open-llm-leaderboard/details_fblgit__UNA-POLAR-10.7B-InstructMath-v2/blob/main/results_2024-01-04T13-31-30.327640.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6640742249502881,\n \"acc_stderr\": 0.03170094021991354,\n \"acc_norm\": 0.6648977588200776,\n \"acc_norm_stderr\": 0.03234593494804173,\n \"mc1\": 0.572827417380661,\n \"mc1_stderr\": 0.017316834410963926,\n \"mc2\": 0.717276829204571,\n \"mc2_stderr\": 0.0151125842350684\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6825938566552902,\n \"acc_stderr\": 0.013602239088038167,\n \"acc_norm\": 0.7073378839590444,\n \"acc_norm_stderr\": 0.013295916103619429\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7093208524198367,\n \"acc_stderr\": 0.004531477407589653,\n \"acc_norm\": 0.8819956184027086,\n \"acc_norm_stderr\": 0.0032195397905004737\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.042320736951515885,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.042320736951515885\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7368421052631579,\n \"acc_stderr\": 0.03583496176361072,\n \"acc_norm\": 0.7368421052631579,\n \"acc_norm_stderr\": 0.03583496176361072\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6943396226415094,\n \"acc_stderr\": 0.028353298073322666,\n \"acc_norm\": 0.6943396226415094,\n \"acc_norm_stderr\": 0.028353298073322666\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.036291466701596636,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.036291466701596636\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6170212765957447,\n \"acc_stderr\": 0.03177821250236922,\n \"acc_norm\": 0.6170212765957447,\n \"acc_norm_stderr\": 0.03177821250236922\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6068965517241379,\n \"acc_stderr\": 0.040703290137070705,\n \"acc_norm\": 0.6068965517241379,\n \"acc_norm_stderr\": 0.040703290137070705\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4708994708994709,\n \"acc_stderr\": 0.025707658614154964,\n \"acc_norm\": 0.4708994708994709,\n \"acc_norm_stderr\": 0.025707658614154964\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42063492063492064,\n \"acc_stderr\": 0.04415438226743744,\n \"acc_norm\": 0.42063492063492064,\n \"acc_norm_stderr\": 0.04415438226743744\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411019,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411019\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8129032258064516,\n \"acc_stderr\": 0.022185710092252252,\n \"acc_norm\": 0.8129032258064516,\n \"acc_norm_stderr\": 0.022185710092252252\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8121212121212121,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.8121212121212121,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8636363636363636,\n \"acc_stderr\": 0.024450155973189835,\n \"acc_norm\": 0.8636363636363636,\n \"acc_norm_stderr\": 0.024450155973189835\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8911917098445595,\n \"acc_stderr\": 0.022473253332768763,\n \"acc_norm\": 0.8911917098445595,\n \"acc_norm_stderr\": 0.022473253332768763\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6564102564102564,\n \"acc_stderr\": 0.024078696580635474,\n \"acc_norm\": 0.6564102564102564,\n \"acc_norm_stderr\": 0.024078696580635474\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3592592592592593,\n \"acc_stderr\": 0.029252905927251972,\n \"acc_norm\": 0.3592592592592593,\n \"acc_norm_stderr\": 0.029252905927251972\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7184873949579832,\n \"acc_stderr\": 0.02921354941437217,\n \"acc_norm\": 0.7184873949579832,\n \"acc_norm_stderr\": 0.02921354941437217\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.39072847682119205,\n \"acc_stderr\": 0.039837983066598075,\n \"acc_norm\": 0.39072847682119205,\n \"acc_norm_stderr\": 0.039837983066598075\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8495412844036697,\n \"acc_stderr\": 0.015328563932669235,\n \"acc_norm\": 0.8495412844036697,\n \"acc_norm_stderr\": 0.015328563932669235\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5833333333333334,\n \"acc_stderr\": 0.033622774366080424,\n \"acc_norm\": 0.5833333333333334,\n \"acc_norm_stderr\": 0.033622774366080424\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8529411764705882,\n \"acc_stderr\": 0.02485747808025046,\n \"acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.02485747808025046\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8438818565400844,\n \"acc_stderr\": 0.023627159460318667,\n \"acc_norm\": 0.8438818565400844,\n \"acc_norm_stderr\": 0.023627159460318667\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7251908396946565,\n \"acc_stderr\": 0.03915345408847836,\n \"acc_norm\": 0.7251908396946565,\n \"acc_norm_stderr\": 0.03915345408847836\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.03849856098794088,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794088\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8240740740740741,\n \"acc_stderr\": 0.03680918141673881,\n \"acc_norm\": 0.8240740740740741,\n \"acc_norm_stderr\": 0.03680918141673881\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.03351953879521269,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.03351953879521269\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4375,\n \"acc_stderr\": 0.04708567521880525,\n \"acc_norm\": 0.4375,\n \"acc_norm_stderr\": 0.04708567521880525\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8252427184466019,\n \"acc_stderr\": 0.0376017800602662,\n \"acc_norm\": 0.8252427184466019,\n \"acc_norm_stderr\": 0.0376017800602662\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8717948717948718,\n \"acc_stderr\": 0.02190190511507333,\n \"acc_norm\": 0.8717948717948718,\n \"acc_norm_stderr\": 0.02190190511507333\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8084291187739464,\n 
\"acc_stderr\": 0.01407285931045195,\n \"acc_norm\": 0.8084291187739464,\n \"acc_norm_stderr\": 0.01407285931045195\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7485549132947977,\n \"acc_stderr\": 0.023357365785874037,\n \"acc_norm\": 0.7485549132947977,\n \"acc_norm_stderr\": 0.023357365785874037\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3843575418994413,\n \"acc_stderr\": 0.016269088663959402,\n \"acc_norm\": 0.3843575418994413,\n \"acc_norm_stderr\": 0.016269088663959402\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.761437908496732,\n \"acc_stderr\": 0.02440439492808787,\n \"acc_norm\": 0.761437908496732,\n \"acc_norm_stderr\": 0.02440439492808787\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7234726688102894,\n \"acc_stderr\": 0.02540383297817961,\n \"acc_norm\": 0.7234726688102894,\n \"acc_norm_stderr\": 0.02540383297817961\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7839506172839507,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.7839506172839507,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5070921985815603,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.5070921985815603,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.485006518904824,\n \"acc_stderr\": 0.012764493202193255,\n \"acc_norm\": 0.485006518904824,\n \"acc_norm_stderr\": 0.012764493202193255\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7463235294117647,\n \"acc_stderr\": 0.026431329870789527,\n \"acc_norm\": 0.7463235294117647,\n \"acc_norm_stderr\": 0.026431329870789527\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6862745098039216,\n \"acc_stderr\": 0.01877168389352817,\n \"acc_norm\": 0.6862745098039216,\n \"acc_norm_stderr\": 0.01877168389352817\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7551020408163265,\n \"acc_stderr\": 0.027529637440174927,\n \"acc_norm\": 0.7551020408163265,\n \"acc_norm_stderr\": 0.027529637440174927\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n \"acc_stderr\": 0.02650859065623327,\n \"acc_norm\": 0.8308457711442786,\n \"acc_norm_stderr\": 0.02650859065623327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352203,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352203\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5783132530120482,\n \"acc_stderr\": 0.038444531817709175,\n \"acc_norm\": 0.5783132530120482,\n \"acc_norm_stderr\": 0.038444531817709175\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.783625730994152,\n \"acc_stderr\": 0.03158149539338733,\n \"acc_norm\": 0.783625730994152,\n \"acc_norm_stderr\": 0.03158149539338733\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.572827417380661,\n \"mc1_stderr\": 0.017316834410963926,\n \"mc2\": 0.717276829204571,\n \"mc2_stderr\": 0.0151125842350684\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.829518547750592,\n \"acc_stderr\": 0.010569021122825897\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6474601971190296,\n \"acc_stderr\": 0.013159909755930333\n }\n}\n```", "repo_url": 
"https://huggingface.co/fblgit/UNA-POLAR-10.7B-InstructMath-v2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-31-30.327640.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-31-30.327640.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-31-30.327640.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-31-30.327640.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-31-30.327640.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_31_30.327640", "path": ["**/details_harness|winogrande|5_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T13-31-30.327640.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T13_31_30.327640", "path": ["results_2024-01-04T13-31-30.327640.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T13-31-30.327640.parquet"]}]}]}
2024-01-04T13:34:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of fblgit/UNA-POLAR-10.7B-InstructMath-v2 Dataset automatically created during the evaluation run of model fblgit/UNA-POLAR-10.7B-InstructMath-v2 on the Open LLM Leaderboard. The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T13:31:30.327640(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of fblgit/UNA-POLAR-10.7B-InstructMath-v2\n\n\n\nDataset automatically created during the evaluation run of model fblgit/UNA-POLAR-10.7B-InstructMath-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:31:30.327640(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of fblgit/UNA-POLAR-10.7B-InstructMath-v2\n\n\n\nDataset automatically created during the evaluation run of model fblgit/UNA-POLAR-10.7B-InstructMath-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:31:30.327640(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 199, 69, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of fblgit/UNA-POLAR-10.7B-InstructMath-v2\n\n\n\nDataset automatically created during the evaluation run of model fblgit/UNA-POLAR-10.7B-InstructMath-v2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:31:30.327640(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
bec5a4c4c71a04cc0ed871af8cff7f1dd735f66c
# preprocessed_SearchQA

The SearchQA question-answer pairs originate from J! Archive2, which comprehensively archives all question-answer pairs
from the renowned television show Jeopardy! The passages are sourced from Google search web page snippets.
We offer passage metadata, encompassing details like 'air_date,' 'category,' 'value,' 'round,' and 'show_number,'
enabling you to enhance retrieval performance at your discretion.
Should you require further details about SearchQA, please refer to the links below.

[Github](https://github.com/nyu-dl/dl4ir-searchQA)<br>
[Paper](https://arxiv.org/abs/1704.05179)<br>

The dataset is derived from [SearchQA](https://huggingface.co/datasets/search_qa).<br>
This preprocessed dataset is for RAG. For more information about our task, visit our [repository](https://github.com/NomaDamas/RAGchain)!<br>

Preprocessing code for the SearchQA dataset for the RAG benchmark is available.<br>
For more information, refer to this link: [huggingface](https://huggingface.co/datasets/NomaDamas/search_qa_split)
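For reference, a minimal loading sketch (not part of the original card). The repository id, the `corpus` and `qa_data` configurations, their splits, and the column names are taken from the dataset metadata recorded below; everything else is illustrative:

```python
from datasets import load_dataset

# Question-answer pairs; each row carries the doc_ids of its associated passages
qa = load_dataset("NomaDamas/split_search_qa", "qa_data", split="test")
print(qa[0]["question"], "->", qa[0]["answer"])
print(qa[0]["doc_id"][:3])  # passage ids, usable for retrieval evaluation

# Passage corpus (~14M snippets, several GB) with 'air_date', 'category', 'value',
# 'round', and 'show_number' metadata; stream it to avoid a full download
corpus = load_dataset("NomaDamas/split_search_qa", "corpus", split="train", streaming=True)
first = next(iter(corpus))
print(first["snippets"], first["category"])
```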
NomaDamas/split_search_qa
[ "license:unknown", "arxiv:1704.05179", "region:us" ]
2024-01-04T13:34:18+00:00
{"license": "unknown", "dataset_info": [{"config_name": "corpus", "features": [{"name": "query_id", "dtype": "string"}, {"name": "snippets", "dtype": "string"}, {"name": "air_date", "dtype": "string"}, {"name": "category", "dtype": "string"}, {"name": "value", "dtype": "string"}, {"name": "round", "dtype": "string"}, {"name": "show_number", "dtype": "int32"}, {"name": "doc_id", "dtype": "string"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 6252715344, "num_examples": 14120776}], "download_size": 3271155810, "dataset_size": 6252715344}, {"config_name": "qa_data", "features": [{"name": "query_id", "dtype": "string"}, {"name": "question", "dtype": "string"}, {"name": "answer", "dtype": "string"}, {"name": "search_results", "struct": [{"name": "related_links", "sequence": "string"}, {"name": "snippets", "sequence": "string"}, {"name": "titles", "sequence": "string"}, {"name": "urls", "sequence": "string"}]}, {"name": "doc_id", "sequence": "string"}, {"name": "__index_level_0__", "dtype": "int64"}], "splits": [{"name": "train", "num_bytes": 6503932619, "num_examples": 173397}, {"name": "test", "num_bytes": 1830028629, "num_examples": 43350}], "download_size": 5008413626, "dataset_size": 8333961248}], "configs": [{"config_name": "corpus", "data_files": [{"split": "train", "path": "corpus/train-*"}]}, {"config_name": "qa_data", "data_files": [{"split": "train", "path": "qa_data/train-*"}, {"split": "test", "path": "qa_data/test-*"}]}]}
2024-01-04T13:52:53+00:00
[ "1704.05179" ]
[]
TAGS #license-unknown #arxiv-1704.05179 #region-us
# preprocessed_SearchQA The SearchQA question-answer pairs originate from J! Archive2, which comprehensively archives all question-answer pairs from the renowned television show Jeopardy! The passages, sourced from Google search web page snippets. We offer passage metadata, encompassing details like 'air_date,' 'category,' 'value,' 'round,' and 'show_number,' enabling you to enhance retrieval performance at your discretion. Should you require further details about SearchQA, please refer to below links. Github<br> Paper<br> The dataset is derived from searhQA.<br> This preprocessed dataset is for RAG. For more information about our task, visit our repository!<br> Preprocess SearchQA dataset code for RAG benchmark. <br> More information, refer to this link! huggingface
[ "# preprocessed_SearchQA\n\nThe SearchQA question-answer pairs originate from J! Archive2, which comprehensively archives all question-answer pairs\nfrom the renowned television show Jeopardy! The passages, sourced from Google search web page snippets.\nWe offer passage metadata, encompassing details like 'air_date,' 'category,' 'value,' 'round,' and 'show_number,'\nenabling you to enhance retrieval performance at your discretion.\nShould you require further details about SearchQA, please refer to below links.\n\nGithub<br>\nPaper<br>\n\nThe dataset is derived from searhQA.<br>\nThis preprocessed dataset is for RAG. For more information about our task, visit our repository!<br>\n\nPreprocess SearchQA dataset code for RAG benchmark. <br>\nMore information, refer to this link! huggingface" ]
[ "TAGS\n#license-unknown #arxiv-1704.05179 #region-us \n", "# preprocessed_SearchQA\n\nThe SearchQA question-answer pairs originate from J! Archive2, which comprehensively archives all question-answer pairs\nfrom the renowned television show Jeopardy! The passages, sourced from Google search web page snippets.\nWe offer passage metadata, encompassing details like 'air_date,' 'category,' 'value,' 'round,' and 'show_number,'\nenabling you to enhance retrieval performance at your discretion.\nShould you require further details about SearchQA, please refer to below links.\n\nGithub<br>\nPaper<br>\n\nThe dataset is derived from searhQA.<br>\nThis preprocessed dataset is for RAG. For more information about our task, visit our repository!<br>\n\nPreprocess SearchQA dataset code for RAG benchmark. <br>\nMore information, refer to this link! huggingface" ]
[ 21, 211 ]
[ "passage: TAGS\n#license-unknown #arxiv-1704.05179 #region-us \n# preprocessed_SearchQA\n\nThe SearchQA question-answer pairs originate from J! Archive2, which comprehensively archives all question-answer pairs\nfrom the renowned television show Jeopardy! The passages, sourced from Google search web page snippets.\nWe offer passage metadata, encompassing details like 'air_date,' 'category,' 'value,' 'round,' and 'show_number,'\nenabling you to enhance retrieval performance at your discretion.\nShould you require further details about SearchQA, please refer to below links.\n\nGithub<br>\nPaper<br>\n\nThe dataset is derived from searhQA.<br>\nThis preprocessed dataset is for RAG. For more information about our task, visit our repository!<br>\n\nPreprocess SearchQA dataset code for RAG benchmark. <br>\nMore information, refer to this link! huggingface" ]
d644c84bb8e25289e5a6ef9ace693d6f6e9fc23a
# Dataset Card for Evaluation run of SyedAbdul/test-7B-slerp <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [SyedAbdul/test-7B-slerp](https://huggingface.co/SyedAbdul/test-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_SyedAbdul__test-7B-slerp", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:37:15.686780](https://huggingface.co/datasets/open-llm-leaderboard/details_SyedAbdul__test-7B-slerp/blob/main/results_2024-01-04T13-37-15.686780.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6489692801245827, "acc_stderr": 0.03208856045898211, "acc_norm": 0.649945556226307, "acc_norm_stderr": 0.03273989306811606, "mc1": 0.46266829865361075, "mc1_stderr": 0.01745464515097059, "mc2": 0.6259520494051883, "mc2_stderr": 0.014977076792645322 }, "harness|arc:challenge|25": { "acc": 0.6450511945392492, "acc_stderr": 0.01398303690409409, "acc_norm": 0.6808873720136519, "acc_norm_stderr": 0.013621696119173311 }, "harness|hellaswag|10": { "acc": 0.6744672376020713, "acc_stderr": 0.004676159299105416, "acc_norm": 0.8607847042421828, "acc_norm_stderr": 0.003454635760066236 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695236, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695236 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6296296296296297, "acc_stderr": 0.041716541613545426, "acc_norm": 0.6296296296296297, "acc_norm_stderr": 0.041716541613545426 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7105263157894737, "acc_stderr": 0.03690677986137283, "acc_norm": 0.7105263157894737, "acc_norm_stderr": 0.03690677986137283 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.6, "acc_stderr": 0.04923659639173309, "acc_norm": 0.6, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7018867924528301, "acc_stderr": 0.028152837942493864, "acc_norm": 0.7018867924528301, "acc_norm_stderr": 0.028152837942493864 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.54, "acc_stderr": 0.05009082659620333, "acc_norm": 0.54, "acc_norm_stderr": 0.05009082659620333 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6473988439306358, "acc_stderr": 0.03643037168958548, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.03643037168958548 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5617021276595745, "acc_stderr": 0.03243618636108101, "acc_norm": 0.5617021276595745, "acc_norm_stderr": 0.03243618636108101 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370332, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370332 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.43386243386243384, "acc_stderr": 0.02552503438247489, "acc_norm": 0.43386243386243384, "acc_norm_stderr": 0.02552503438247489 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.4, "acc_stderr": 0.04923659639173309, "acc_norm": 0.4, "acc_norm_stderr": 0.04923659639173309 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7838709677419354, "acc_stderr": 0.023415293433568525, "acc_norm": 0.7838709677419354, "acc_norm_stderr": 0.023415293433568525 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5221674876847291, "acc_stderr": 0.03514528562175007, "acc_norm": 0.5221674876847291, "acc_norm_stderr": 0.03514528562175007 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7818181818181819, "acc_stderr": 0.03225078108306289, "acc_norm": 0.7818181818181819, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.028869778460267045, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.028869778460267045 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033456, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033456 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6743589743589744, "acc_stderr": 0.02375966576741229, "acc_norm": 0.6743589743589744, "acc_norm_stderr": 0.02375966576741229 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.028317533496066496, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.028317533496066496 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6974789915966386, "acc_stderr": 0.029837962388291932, "acc_norm": 0.6974789915966386, "acc_norm_stderr": 0.029837962388291932 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 
0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8458715596330275, "acc_stderr": 0.015480826865374303, "acc_norm": 0.8458715596330275, "acc_norm_stderr": 0.015480826865374303 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5, "acc_stderr": 0.034099716973523674, "acc_norm": 0.5, "acc_norm_stderr": 0.034099716973523674 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8137254901960784, "acc_stderr": 0.027325470966716312, "acc_norm": 0.8137254901960784, "acc_norm_stderr": 0.027325470966716312 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290913, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290913 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.672645739910314, "acc_stderr": 0.03149384670994131, "acc_norm": 0.672645739910314, "acc_norm_stderr": 0.03149384670994131 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8015267175572519, "acc_stderr": 0.034981493854624734, "acc_norm": 0.8015267175572519, "acc_norm_stderr": 0.034981493854624734 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7520661157024794, "acc_stderr": 0.03941897526516303, "acc_norm": 0.7520661157024794, "acc_norm_stderr": 0.03941897526516303 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8333333333333334, "acc_stderr": 0.03602814176392645, "acc_norm": 0.8333333333333334, "acc_norm_stderr": 0.03602814176392645 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.021586494001281376, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.021586494001281376 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8339719029374202, "acc_stderr": 0.013306478243066302, "acc_norm": 0.8339719029374202, "acc_norm_stderr": 0.013306478243066302 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7283236994219653, "acc_stderr": 0.023948512905468365, "acc_norm": 0.7283236994219653, "acc_norm_stderr": 0.023948512905468365 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3854748603351955, "acc_stderr": 0.016277927039638193, "acc_norm": 0.3854748603351955, "acc_norm_stderr": 0.016277927039638193 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7222222222222222, "acc_stderr": 0.025646863097137897, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.025646863097137897 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6945337620578779, "acc_stderr": 0.02616058445014045, "acc_norm": 0.6945337620578779, "acc_norm_stderr": 0.02616058445014045 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7623456790123457, "acc_stderr": 0.02368359183700856, "acc_norm": 0.7623456790123457, "acc_norm_stderr": 0.02368359183700856 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4858156028368794, 
"acc_stderr": 0.02981549448368206, "acc_norm": 0.4858156028368794, "acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4471968709256845, "acc_stderr": 0.012698825252435106, "acc_norm": 0.4471968709256845, "acc_norm_stderr": 0.012698825252435106 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6801470588235294, "acc_stderr": 0.02833295951403121, "acc_norm": 0.6801470588235294, "acc_norm_stderr": 0.02833295951403121 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6666666666666666, "acc_stderr": 0.019070985589687495, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.019070985589687495 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.02812342933514278, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.02812342933514278 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8557213930348259, "acc_stderr": 0.024845753212306032, "acc_norm": 0.8557213930348259, "acc_norm_stderr": 0.024845753212306032 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.536144578313253, "acc_stderr": 0.03882310850890594, "acc_norm": 0.536144578313253, "acc_norm_stderr": 0.03882310850890594 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.46266829865361075, "mc1_stderr": 0.01745464515097059, "mc2": 0.6259520494051883, "mc2_stderr": 0.014977076792645322 }, "harness|winogrande|5": { "acc": 0.8082083662194159, "acc_stderr": 0.011065209664659527 }, "harness|gsm8k|5": { "acc": 0.6542835481425322, "acc_stderr": 0.01310042299044157 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
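As an illustrative addendum, here is a minimal sketch of loading the aggregated metrics of the most recent run. It assumes only the standard `datasets` API together with the "results" configuration and "latest" split defined in this repository's configs; the variable names are arbitrary and not part of the original card.

```python
from datasets import load_dataset

# Aggregated metrics for this model are stored in the "results" config.
# The "latest" split points at the newest evaluation run, so this snippet
# always returns the most recent numbers.
results = load_dataset(
    "open-llm-leaderboard/details_SyedAbdul__test-7B-slerp",
    "results",
    split="latest",
)

# The split holds the raw results records; print the first one to inspect
# the per-task accuracies reported above.
print(results[0])
```

The same pattern works for any of the per-task configurations (for example "harness_arc_challenge_25"), swapping in the desired config name.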
open-llm-leaderboard/details_SyedAbdul__test-7B-slerp
[ "region:us" ]
2024-01-04T13:39:35+00:00
{"pretty_name": "Evaluation run of SyedAbdul/test-7B-slerp", "dataset_summary": "Dataset automatically created during the evaluation run of model [SyedAbdul/test-7B-slerp](https://huggingface.co/SyedAbdul/test-7B-slerp) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_SyedAbdul__test-7B-slerp\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:37:15.686780](https://huggingface.co/datasets/open-llm-leaderboard/details_SyedAbdul__test-7B-slerp/blob/main/results_2024-01-04T13-37-15.686780.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6489692801245827,\n \"acc_stderr\": 0.03208856045898211,\n \"acc_norm\": 0.649945556226307,\n \"acc_norm_stderr\": 0.03273989306811606,\n \"mc1\": 0.46266829865361075,\n \"mc1_stderr\": 0.01745464515097059,\n \"mc2\": 0.6259520494051883,\n \"mc2_stderr\": 0.014977076792645322\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6450511945392492,\n \"acc_stderr\": 0.01398303690409409,\n \"acc_norm\": 0.6808873720136519,\n \"acc_norm_stderr\": 0.013621696119173311\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6744672376020713,\n \"acc_stderr\": 0.004676159299105416,\n \"acc_norm\": 0.8607847042421828,\n \"acc_norm_stderr\": 0.003454635760066236\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695236,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695236\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6296296296296297,\n \"acc_stderr\": 0.041716541613545426,\n \"acc_norm\": 0.6296296296296297,\n \"acc_norm_stderr\": 0.041716541613545426\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7105263157894737,\n \"acc_stderr\": 0.03690677986137283,\n \"acc_norm\": 0.7105263157894737,\n \"acc_norm_stderr\": 0.03690677986137283\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7018867924528301,\n \"acc_stderr\": 0.028152837942493864,\n \"acc_norm\": 0.7018867924528301,\n \"acc_norm_stderr\": 0.028152837942493864\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.05016135580465919,\n 
\"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.54,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.54,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.03643037168958548,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.03643037168958548\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5617021276595745,\n \"acc_stderr\": 0.03243618636108101,\n \"acc_norm\": 0.5617021276595745,\n \"acc_norm_stderr\": 0.03243618636108101\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370332,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370332\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.43386243386243384,\n \"acc_stderr\": 0.02552503438247489,\n \"acc_norm\": 0.43386243386243384,\n \"acc_norm_stderr\": 0.02552503438247489\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.04923659639173309,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.04923659639173309\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.023415293433568525,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.023415293433568525\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5221674876847291,\n \"acc_stderr\": 0.03514528562175007,\n \"acc_norm\": 0.5221674876847291,\n \"acc_norm_stderr\": 0.03514528562175007\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7818181818181819,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.7818181818181819,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.028869778460267045,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.028869778460267045\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033456,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033456\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6743589743589744,\n 
\"acc_stderr\": 0.02375966576741229,\n \"acc_norm\": 0.6743589743589744,\n \"acc_norm_stderr\": 0.02375966576741229\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.028317533496066496,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.028317533496066496\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6974789915966386,\n \"acc_stderr\": 0.029837962388291932,\n \"acc_norm\": 0.6974789915966386,\n \"acc_norm_stderr\": 0.029837962388291932\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374303,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374303\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.034099716973523674,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.034099716973523674\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8137254901960784,\n \"acc_stderr\": 0.027325470966716312,\n \"acc_norm\": 0.8137254901960784,\n \"acc_norm_stderr\": 0.027325470966716312\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290913,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290913\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.672645739910314,\n \"acc_stderr\": 0.03149384670994131,\n \"acc_norm\": 0.672645739910314,\n \"acc_norm_stderr\": 0.03149384670994131\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.034981493854624734,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.034981493854624734\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7520661157024794,\n \"acc_stderr\": 0.03941897526516303,\n \"acc_norm\": 0.7520661157024794,\n \"acc_norm_stderr\": 0.03941897526516303\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8333333333333334,\n \"acc_stderr\": 0.03602814176392645,\n \"acc_norm\": 0.8333333333333334,\n \"acc_norm_stderr\": 0.03602814176392645\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.021586494001281376,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.021586494001281376\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8339719029374202,\n \"acc_stderr\": 0.013306478243066302,\n \"acc_norm\": 0.8339719029374202,\n \"acc_norm_stderr\": 
0.013306478243066302\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7283236994219653,\n \"acc_stderr\": 0.023948512905468365,\n \"acc_norm\": 0.7283236994219653,\n \"acc_norm_stderr\": 0.023948512905468365\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3854748603351955,\n \"acc_stderr\": 0.016277927039638193,\n \"acc_norm\": 0.3854748603351955,\n \"acc_norm_stderr\": 0.016277927039638193\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.025646863097137897,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.025646863097137897\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6945337620578779,\n \"acc_stderr\": 0.02616058445014045,\n \"acc_norm\": 0.6945337620578779,\n \"acc_norm_stderr\": 0.02616058445014045\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7623456790123457,\n \"acc_stderr\": 0.02368359183700856,\n \"acc_norm\": 0.7623456790123457,\n \"acc_norm_stderr\": 0.02368359183700856\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4471968709256845,\n \"acc_stderr\": 0.012698825252435106,\n \"acc_norm\": 0.4471968709256845,\n \"acc_norm_stderr\": 0.012698825252435106\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6801470588235294,\n \"acc_stderr\": 0.02833295951403121,\n \"acc_norm\": 0.6801470588235294,\n \"acc_norm_stderr\": 0.02833295951403121\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.019070985589687495,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.019070985589687495\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.02812342933514278,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.02812342933514278\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8557213930348259,\n \"acc_stderr\": 0.024845753212306032,\n \"acc_norm\": 0.8557213930348259,\n \"acc_norm_stderr\": 0.024845753212306032\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.536144578313253,\n \"acc_stderr\": 0.03882310850890594,\n \"acc_norm\": 0.536144578313253,\n \"acc_norm_stderr\": 0.03882310850890594\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.46266829865361075,\n \"mc1_stderr\": 0.01745464515097059,\n \"mc2\": 0.6259520494051883,\n \"mc2_stderr\": 0.014977076792645322\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8082083662194159,\n \"acc_stderr\": 0.011065209664659527\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6542835481425322,\n \"acc_stderr\": 0.01310042299044157\n }\n}\n```", "repo_url": "https://huggingface.co/SyedAbdul/test-7B-slerp", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-37-15.686780.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-37-15.686780.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-37-15.686780.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-37-15.686780.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-37-15.686780.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-37-15.686780.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["**/details_harness|winogrande|5_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T13-37-15.686780.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T13_37_15.686780", "path": ["results_2024-01-04T13-37-15.686780.parquet"]}, {"split": "latest", "path": 
["results_2024-01-04T13-37-15.686780.parquet"]}]}]}
2024-01-04T13:39:58+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of SyedAbdul/test-7B-slerp Dataset automatically created during the evaluation run of model SyedAbdul/test-7B-slerp on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T13:37:15.686780 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
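This flattened record drops the code snippet that the sentence "To load the details from a run, you can for instance do the following" refers to; below is a minimal sketch of what that call typically looks like. The repository id `open-llm-leaderboard/details_SyedAbdul__test-7B-slerp` is an assumption inferred from the `details_<org>__<model>` naming pattern used by the other leaderboard detail datasets in this dump, while the config name `harness_winogrande_5` is taken from the config list in the metadata above.

```python
from datasets import load_dataset

# Hypothetical repository id, inferred from the details_<org>__<model> naming
# convention of Open LLM Leaderboard detail datasets (not stated in this record).
data = load_dataset(
    "open-llm-leaderboard/details_SyedAbdul__test-7B-slerp",
    "harness_winogrande_5",  # any config_name listed in the metadata above works here
    split="train",           # per the card, "train" always points at the latest results
)
print(data)
```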
[ "# Dataset Card for Evaluation run of SyedAbdul/test-7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model SyedAbdul/test-7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:37:15.686780(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of SyedAbdul/test-7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model SyedAbdul/test-7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:37:15.686780(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 181, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of SyedAbdul/test-7B-slerp\n\n\n\nDataset automatically created during the evaluation run of model SyedAbdul/test-7B-slerp on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:37:15.686780(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
62f6b6472f52ebd54b0721496a77c85b015ed538
# Dataset Card for Evaluation run of BEE-spoke-data/smol_llama-220M-open_instruct <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [BEE-spoke-data/smol_llama-220M-open_instruct](https://huggingface.co/BEE-spoke-data/smol_llama-220M-open_instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_BEE-spoke-data__smol_llama-220M-open_instruct", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:39:47.179873](https://huggingface.co/datasets/open-llm-leaderboard/details_BEE-spoke-data__smol_llama-220M-open_instruct/blob/main/results_2024-01-04T13-39-47.179873.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.25998788076923746, "acc_stderr": 0.030908234134550842, "acc_norm": 0.2615422501208712, "acc_norm_stderr": 0.03173823021382238, "mc1": 0.2423500611995104, "mc1_stderr": 0.01500067437357034, "mc2": 0.4406371478334913, "mc2_stderr": 0.015537102899912702 }, "harness|arc:challenge|25": { "acc": 0.19283276450511946, "acc_stderr": 0.011529055465663345, "acc_norm": 0.25, "acc_norm_stderr": 0.012653835621466646 }, "harness|hellaswag|10": { "acc": 0.27972515435172274, "acc_stderr": 0.004479467619464786, "acc_norm": 0.29705238000398326, "acc_norm_stderr": 0.00456025908319737 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.19, "acc_stderr": 0.039427724440366234, "acc_norm": 0.19, "acc_norm_stderr": 0.039427724440366234 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2222222222222222, "acc_stderr": 0.035914440841969694, "acc_norm": 0.2222222222222222, "acc_norm_stderr": 0.035914440841969694 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.19078947368421054, "acc_stderr": 0.03197565821032499, "acc_norm": 0.19078947368421054, "acc_norm_stderr": 0.03197565821032499 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.18, "acc_stderr": 0.03861229196653696, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653696 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.33962264150943394, "acc_stderr": 0.02914690474779834, "acc_norm": 0.33962264150943394, "acc_norm_stderr": 0.02914690474779834 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.25, "acc_stderr": 0.03621034121889507, "acc_norm": 0.25, "acc_norm_stderr": 0.03621034121889507 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.26, "acc_stderr": 0.04408440022768081, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768081 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, 
"acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.26011560693641617, "acc_stderr": 0.03345036916788992, "acc_norm": 0.26011560693641617, "acc_norm_stderr": 0.03345036916788992 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237655, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237655 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.27, "acc_stderr": 0.044619604333847415, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847415 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.26382978723404255, "acc_stderr": 0.02880998985410297, "acc_norm": 0.26382978723404255, "acc_norm_stderr": 0.02880998985410297 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2894736842105263, "acc_stderr": 0.042663394431593935, "acc_norm": 0.2894736842105263, "acc_norm_stderr": 0.042663394431593935 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.25517241379310346, "acc_stderr": 0.03632984052707842, "acc_norm": 0.25517241379310346, "acc_norm_stderr": 0.03632984052707842 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.25925925925925924, "acc_stderr": 0.022569897074918417, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.022569897074918417 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.1746031746031746, "acc_stderr": 0.03395490020856113, "acc_norm": 0.1746031746031746, "acc_norm_stderr": 0.03395490020856113 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.267741935483871, "acc_stderr": 0.02518900666021238, "acc_norm": 0.267741935483871, "acc_norm_stderr": 0.02518900666021238 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2857142857142857, "acc_stderr": 0.031785297106427496, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.031785297106427496 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.21818181818181817, "acc_stderr": 0.03225078108306289, "acc_norm": 0.21818181818181817, "acc_norm_stderr": 0.03225078108306289 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.2474747474747475, "acc_stderr": 0.030746300742124495, "acc_norm": 0.2474747474747475, "acc_norm_stderr": 0.030746300742124495 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.29015544041450775, "acc_stderr": 0.032752644677915145, "acc_norm": 0.29015544041450775, "acc_norm_stderr": 0.032752644677915145 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.023901157979402534, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.023901157979402534 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.026842057873833706, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.026842057873833706 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3277310924369748, "acc_stderr": 0.030489911417673227, "acc_norm": 0.3277310924369748, "acc_norm_stderr": 0.030489911417673227 }, 
"harness|hendrycksTest-high_school_physics|5": { "acc": 0.32450331125827814, "acc_stderr": 0.03822746937658754, "acc_norm": 0.32450331125827814, "acc_norm_stderr": 0.03822746937658754 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3321100917431193, "acc_stderr": 0.020192682985423344, "acc_norm": 0.3321100917431193, "acc_norm_stderr": 0.020192682985423344 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4675925925925926, "acc_stderr": 0.03402801581358966, "acc_norm": 0.4675925925925926, "acc_norm_stderr": 0.03402801581358966 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.2549019607843137, "acc_stderr": 0.030587591351604246, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604246 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.2616033755274262, "acc_stderr": 0.02860951671699494, "acc_norm": 0.2616033755274262, "acc_norm_stderr": 0.02860951671699494 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.27802690582959644, "acc_stderr": 0.03006958487449403, "acc_norm": 0.27802690582959644, "acc_norm_stderr": 0.03006958487449403 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.24427480916030533, "acc_stderr": 0.03768335959728742, "acc_norm": 0.24427480916030533, "acc_norm_stderr": 0.03768335959728742 }, "harness|hendrycksTest-international_law|5": { "acc": 0.19008264462809918, "acc_stderr": 0.03581796951709282, "acc_norm": 0.19008264462809918, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.23148148148148148, "acc_stderr": 0.04077494709252626, "acc_norm": 0.23148148148148148, "acc_norm_stderr": 0.04077494709252626 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.26993865030674846, "acc_stderr": 0.034878251684978906, "acc_norm": 0.26993865030674846, "acc_norm_stderr": 0.034878251684978906 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.25892857142857145, "acc_stderr": 0.041577515398656284, "acc_norm": 0.25892857142857145, "acc_norm_stderr": 0.041577515398656284 }, "harness|hendrycksTest-management|5": { "acc": 0.27184466019417475, "acc_stderr": 0.044052680241409216, "acc_norm": 0.27184466019417475, "acc_norm_stderr": 0.044052680241409216 }, "harness|hendrycksTest-marketing|5": { "acc": 0.20512820512820512, "acc_stderr": 0.026453508054040346, "acc_norm": 0.20512820512820512, "acc_norm_stderr": 0.026453508054040346 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2247765006385696, "acc_stderr": 0.01492744710193717, "acc_norm": 0.2247765006385696, "acc_norm_stderr": 0.01492744710193717 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2254335260115607, "acc_stderr": 0.022497230190967547, "acc_norm": 0.2254335260115607, "acc_norm_stderr": 0.022497230190967547 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2435754189944134, "acc_stderr": 0.014355911964767864, "acc_norm": 0.2435754189944134, "acc_norm_stderr": 0.014355911964767864 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.24509803921568626, "acc_stderr": 0.024630048979824768, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.024630048979824768 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2347266881028939, "acc_stderr": 0.024071805887677045, "acc_norm": 0.2347266881028939, "acc_norm_stderr": 0.024071805887677045 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.21604938271604937, "acc_stderr": 
0.022899162918445806, "acc_norm": 0.21604938271604937, "acc_norm_stderr": 0.022899162918445806 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2553191489361702, "acc_stderr": 0.026011992930902013, "acc_norm": 0.2553191489361702, "acc_norm_stderr": 0.026011992930902013 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24185136897001303, "acc_stderr": 0.010936550813827066, "acc_norm": 0.24185136897001303, "acc_norm_stderr": 0.010936550813827066 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.44485294117647056, "acc_stderr": 0.030187532060329376, "acc_norm": 0.44485294117647056, "acc_norm_stderr": 0.030187532060329376 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.2434640522875817, "acc_stderr": 0.017362473762146634, "acc_norm": 0.2434640522875817, "acc_norm_stderr": 0.017362473762146634 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.24545454545454545, "acc_stderr": 0.041220665028782834, "acc_norm": 0.24545454545454545, "acc_norm_stderr": 0.041220665028782834 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.3346938775510204, "acc_stderr": 0.030209235226242307, "acc_norm": 0.3346938775510204, "acc_norm_stderr": 0.030209235226242307 }, "harness|hendrycksTest-sociology|5": { "acc": 0.24875621890547264, "acc_stderr": 0.030567675938916714, "acc_norm": 0.24875621890547264, "acc_norm_stderr": 0.030567675938916714 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.26, "acc_stderr": 0.04408440022768079, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-virology|5": { "acc": 0.18072289156626506, "acc_stderr": 0.02995573785581014, "acc_norm": 0.18072289156626506, "acc_norm_stderr": 0.02995573785581014 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.2046783625730994, "acc_stderr": 0.030944459778533207, "acc_norm": 0.2046783625730994, "acc_norm_stderr": 0.030944459778533207 }, "harness|truthfulqa:mc|0": { "mc1": 0.2423500611995104, "mc1_stderr": 0.01500067437357034, "mc2": 0.4406371478334913, "mc2_stderr": 0.015537102899912702 }, "harness|winogrande|5": { "acc": 0.5027624309392266, "acc_stderr": 0.014052271211616441 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
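As a usage note on the "results" configuration and the "latest" split described above, here is a minimal sketch for pulling only the aggregated metrics of the most recent run; the exact field names in the returned record are assumed to mirror the JSON shown under "Latest results".

```python
from datasets import load_dataset

# Aggregated metrics of the most recent evaluation run for this model:
# the "results" config with its "latest" split, as declared in the dataset metadata.
results = load_dataset(
    "open-llm-leaderboard/details_BEE-spoke-data__smol_llama-220M-open_instruct",
    "results",
    split="latest",
)
print(results[0])  # expected to expose the aggregated acc / acc_norm / mc1 / mc2 fields
```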
open-llm-leaderboard/details_BEE-spoke-data__smol_llama-220M-open_instruct
[ "region:us" ]
2024-01-04T13:41:38+00:00
{"pretty_name": "Evaluation run of BEE-spoke-data/smol_llama-220M-open_instruct", "dataset_summary": "Dataset automatically created during the evaluation run of model [BEE-spoke-data/smol_llama-220M-open_instruct](https://huggingface.co/BEE-spoke-data/smol_llama-220M-open_instruct) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_BEE-spoke-data__smol_llama-220M-open_instruct\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:39:47.179873](https://huggingface.co/datasets/open-llm-leaderboard/details_BEE-spoke-data__smol_llama-220M-open_instruct/blob/main/results_2024-01-04T13-39-47.179873.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.25998788076923746,\n \"acc_stderr\": 0.030908234134550842,\n \"acc_norm\": 0.2615422501208712,\n \"acc_norm_stderr\": 0.03173823021382238,\n \"mc1\": 0.2423500611995104,\n \"mc1_stderr\": 0.01500067437357034,\n \"mc2\": 0.4406371478334913,\n \"mc2_stderr\": 0.015537102899912702\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.19283276450511946,\n \"acc_stderr\": 0.011529055465663345,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.012653835621466646\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.27972515435172274,\n \"acc_stderr\": 0.004479467619464786,\n \"acc_norm\": 0.29705238000398326,\n \"acc_norm_stderr\": 0.00456025908319737\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.19,\n \"acc_stderr\": 0.039427724440366234,\n \"acc_norm\": 0.19,\n \"acc_norm_stderr\": 0.039427724440366234\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2222222222222222,\n \"acc_stderr\": 0.035914440841969694,\n \"acc_norm\": 0.2222222222222222,\n \"acc_norm_stderr\": 0.035914440841969694\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.19078947368421054,\n \"acc_stderr\": 0.03197565821032499,\n \"acc_norm\": 0.19078947368421054,\n \"acc_norm_stderr\": 0.03197565821032499\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.03861229196653696,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.03861229196653696\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.33962264150943394,\n \"acc_stderr\": 0.02914690474779834,\n \"acc_norm\": 0.33962264150943394,\n \"acc_norm_stderr\": 0.02914690474779834\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03621034121889507,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03621034121889507\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768081,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768081\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.26011560693641617,\n \"acc_stderr\": 0.03345036916788992,\n \"acc_norm\": 0.26011560693641617,\n \"acc_norm_stderr\": 0.03345036916788992\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237655,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237655\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847415,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847415\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.26382978723404255,\n \"acc_stderr\": 0.02880998985410297,\n \"acc_norm\": 0.26382978723404255,\n \"acc_norm_stderr\": 0.02880998985410297\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2894736842105263,\n \"acc_stderr\": 0.042663394431593935,\n \"acc_norm\": 0.2894736842105263,\n \"acc_norm_stderr\": 0.042663394431593935\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.25517241379310346,\n \"acc_stderr\": 0.03632984052707842,\n \"acc_norm\": 0.25517241379310346,\n \"acc_norm_stderr\": 0.03632984052707842\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.022569897074918417,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.022569897074918417\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.1746031746031746,\n \"acc_stderr\": 0.03395490020856113,\n \"acc_norm\": 0.1746031746031746,\n \"acc_norm_stderr\": 0.03395490020856113\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.267741935483871,\n \"acc_stderr\": 0.02518900666021238,\n \"acc_norm\": 0.267741935483871,\n \"acc_norm_stderr\": 0.02518900666021238\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.031785297106427496,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.031785297106427496\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.21818181818181817,\n \"acc_stderr\": 0.03225078108306289,\n \"acc_norm\": 0.21818181818181817,\n \"acc_norm_stderr\": 0.03225078108306289\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.2474747474747475,\n \"acc_stderr\": 0.030746300742124495,\n \"acc_norm\": 0.2474747474747475,\n \"acc_norm_stderr\": 0.030746300742124495\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.29015544041450775,\n \"acc_stderr\": 0.032752644677915145,\n \"acc_norm\": 0.29015544041450775,\n 
\"acc_norm_stderr\": 0.032752644677915145\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.023901157979402534,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.023901157979402534\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26296296296296295,\n \"acc_stderr\": 0.026842057873833706,\n \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.026842057873833706\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.3277310924369748,\n \"acc_stderr\": 0.030489911417673227,\n \"acc_norm\": 0.3277310924369748,\n \"acc_norm_stderr\": 0.030489911417673227\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.32450331125827814,\n \"acc_stderr\": 0.03822746937658754,\n \"acc_norm\": 0.32450331125827814,\n \"acc_norm_stderr\": 0.03822746937658754\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.3321100917431193,\n \"acc_stderr\": 0.020192682985423344,\n \"acc_norm\": 0.3321100917431193,\n \"acc_norm_stderr\": 0.020192682985423344\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4675925925925926,\n \"acc_stderr\": 0.03402801581358966,\n \"acc_norm\": 0.4675925925925926,\n \"acc_norm_stderr\": 0.03402801581358966\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.030587591351604246,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.030587591351604246\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.2616033755274262,\n \"acc_stderr\": 0.02860951671699494,\n \"acc_norm\": 0.2616033755274262,\n \"acc_norm_stderr\": 0.02860951671699494\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.27802690582959644,\n \"acc_stderr\": 0.03006958487449403,\n \"acc_norm\": 0.27802690582959644,\n \"acc_norm_stderr\": 0.03006958487449403\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.24427480916030533,\n \"acc_stderr\": 0.03768335959728742,\n \"acc_norm\": 0.24427480916030533,\n \"acc_norm_stderr\": 0.03768335959728742\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.19008264462809918,\n \"acc_stderr\": 0.03581796951709282,\n \"acc_norm\": 0.19008264462809918,\n \"acc_norm_stderr\": 0.03581796951709282\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.23148148148148148,\n \"acc_stderr\": 0.04077494709252626,\n \"acc_norm\": 0.23148148148148148,\n \"acc_norm_stderr\": 0.04077494709252626\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.26993865030674846,\n \"acc_stderr\": 0.034878251684978906,\n \"acc_norm\": 0.26993865030674846,\n \"acc_norm_stderr\": 0.034878251684978906\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.25892857142857145,\n \"acc_stderr\": 0.041577515398656284,\n \"acc_norm\": 0.25892857142857145,\n \"acc_norm_stderr\": 0.041577515398656284\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.27184466019417475,\n \"acc_stderr\": 0.044052680241409216,\n \"acc_norm\": 0.27184466019417475,\n \"acc_norm_stderr\": 0.044052680241409216\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.20512820512820512,\n \"acc_stderr\": 0.026453508054040346,\n \"acc_norm\": 0.20512820512820512,\n \"acc_norm_stderr\": 0.026453508054040346\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 
0.04512608598542127\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2247765006385696,\n \"acc_stderr\": 0.01492744710193717,\n \"acc_norm\": 0.2247765006385696,\n \"acc_norm_stderr\": 0.01492744710193717\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2254335260115607,\n \"acc_stderr\": 0.022497230190967547,\n \"acc_norm\": 0.2254335260115607,\n \"acc_norm_stderr\": 0.022497230190967547\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2435754189944134,\n \"acc_stderr\": 0.014355911964767864,\n \"acc_norm\": 0.2435754189944134,\n \"acc_norm_stderr\": 0.014355911964767864\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.024630048979824768,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.024630048979824768\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2347266881028939,\n \"acc_stderr\": 0.024071805887677045,\n \"acc_norm\": 0.2347266881028939,\n \"acc_norm_stderr\": 0.024071805887677045\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.21604938271604937,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.21604938271604937,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2553191489361702,\n \"acc_stderr\": 0.026011992930902013,\n \"acc_norm\": 0.2553191489361702,\n \"acc_norm_stderr\": 0.026011992930902013\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24185136897001303,\n \"acc_stderr\": 0.010936550813827066,\n \"acc_norm\": 0.24185136897001303,\n \"acc_norm_stderr\": 0.010936550813827066\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.44485294117647056,\n \"acc_stderr\": 0.030187532060329376,\n \"acc_norm\": 0.44485294117647056,\n \"acc_norm_stderr\": 0.030187532060329376\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.2434640522875817,\n \"acc_stderr\": 0.017362473762146634,\n \"acc_norm\": 0.2434640522875817,\n \"acc_norm_stderr\": 0.017362473762146634\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.24545454545454545,\n \"acc_stderr\": 0.041220665028782834,\n \"acc_norm\": 0.24545454545454545,\n \"acc_norm_stderr\": 0.041220665028782834\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.3346938775510204,\n \"acc_stderr\": 0.030209235226242307,\n \"acc_norm\": 0.3346938775510204,\n \"acc_norm_stderr\": 0.030209235226242307\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.24875621890547264,\n \"acc_stderr\": 0.030567675938916714,\n \"acc_norm\": 0.24875621890547264,\n \"acc_norm_stderr\": 0.030567675938916714\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.18072289156626506,\n \"acc_stderr\": 0.02995573785581014,\n \"acc_norm\": 0.18072289156626506,\n \"acc_norm_stderr\": 0.02995573785581014\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.2046783625730994,\n \"acc_stderr\": 0.030944459778533207,\n \"acc_norm\": 0.2046783625730994,\n \"acc_norm_stderr\": 0.030944459778533207\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2423500611995104,\n \"mc1_stderr\": 0.01500067437357034,\n \"mc2\": 0.4406371478334913,\n \"mc2_stderr\": 0.015537102899912702\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5027624309392266,\n \"acc_stderr\": 
0.014052271211616441\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/BEE-spoke-data/smol_llama-220M-open_instruct", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-39-47.179873.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-39-47.179873.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-39-47.179873.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-39-47.179873.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-39-47.179873.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["**/details_harness|winogrande|5_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-04T13-39-47.179873.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T13_39_47.179873", "path": ["results_2024-01-04T13-39-47.179873.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T13-39-47.179873.parquet"]}]}]}
2024-01-04T13:42:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of BEE-spoke-data/smol_llama-220M-open_instruct Dataset automatically created during the evaluation run of model BEE-spoke-data/smol_llama-220M-open_instruct on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T13:39:47.179873 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
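The loading snippet referenced above was stripped from this processed text field. A minimal sketch follows, assuming the repository follows the leaderboard's usual `details_<org>__<model>` naming pattern for this run; the configuration name `harness_winogrande_5` and the `latest` split are taken from this record's metadata, and everything else is illustrative rather than authoritative:

```python
from datasets import load_dataset

# Assumed repo id, following the "details_<org>__<model>" pattern used by the
# Open LLM Leaderboard for this evaluation run (not stated verbatim in this field).
repo_id = "open-llm-leaderboard/details_BEE-spoke-data__smol_llama-220M-open_instruct"

# "harness_winogrande_5" and the "latest" split both appear in this record's
# metadata configuration list.
winogrande_details = load_dataset(repo_id, "harness_winogrande_5", split="latest")
print(winogrande_details)
```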
[ "# Dataset Card for Evaluation run of BEE-spoke-data/smol_llama-220M-open_instruct\n\n\n\nDataset automatically created during the evaluation run of model BEE-spoke-data/smol_llama-220M-open_instruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:39:47.179873(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of BEE-spoke-data/smol_llama-220M-open_instruct\n\n\n\nDataset automatically created during the evaluation run of model BEE-spoke-data/smol_llama-220M-open_instruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:39:47.179873(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 203, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of BEE-spoke-data/smol_llama-220M-open_instruct\n\n\n\nDataset automatically created during the evaluation run of model BEE-spoke-data/smol_llama-220M-open_instruct on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:39:47.179873(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
3342e80506d2eb8cd327afb88c8affcf42e9ef62
# Dataset Card for Evaluation run of cloudyu/Mixtral_11Bx2_MoE_19B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [cloudyu/Mixtral_11Bx2_MoE_19B](https://huggingface.co/cloudyu/Mixtral_11Bx2_MoE_19B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_cloudyu__Mixtral_11Bx2_MoE_19B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:40:23.223799](https://huggingface.co/datasets/open-llm-leaderboard/details_cloudyu__Mixtral_11Bx2_MoE_19B/blob/main/results_2024-01-04T13-40-23.223799.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6668773796285904, "acc_stderr": 0.03165405330888546, "acc_norm": 0.6676621047943123, "acc_norm_stderr": 0.03229875275948039, "mc1": 0.5716034271725826, "mc1_stderr": 0.017323088597314747, "mc2": 0.7199912316059361, "mc2_stderr": 0.014982625070109003 }, "harness|arc:challenge|25": { "acc": 0.6843003412969283, "acc_stderr": 0.013582571095815291, "acc_norm": 0.71160409556314, "acc_norm_stderr": 0.013238394422428173 }, "harness|hellaswag|10": { "acc": 0.7140011949810795, "acc_stderr": 0.004509652679395676, "acc_norm": 0.8846843258315077, "acc_norm_stderr": 0.00318749750908742 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.756578947368421, "acc_stderr": 0.034923496688842384, "acc_norm": 0.756578947368421, "acc_norm_stderr": 0.034923496688842384 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.74, "acc_stderr": 0.0440844002276808, "acc_norm": 0.74, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.02863723563980089, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.02863723563980089 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956913, "acc_norm": 0.51, "acc_norm_stderr": 
0.05024183937956913 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.04858083574266346, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.04858083574266346 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768077, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768077 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.625531914893617, "acc_stderr": 0.03163910665367291, "acc_norm": 0.625531914893617, "acc_norm_stderr": 0.03163910665367291 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6344827586206897, "acc_stderr": 0.040131241954243856, "acc_norm": 0.6344827586206897, "acc_norm_stderr": 0.040131241954243856 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4973544973544973, "acc_stderr": 0.02575094967813039, "acc_norm": 0.4973544973544973, "acc_norm_stderr": 0.02575094967813039 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8193548387096774, "acc_stderr": 0.021886178567172534, "acc_norm": 0.8193548387096774, "acc_norm_stderr": 0.021886178567172534 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.03517945038691063, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8121212121212121, "acc_stderr": 0.03050193405942914, "acc_norm": 0.8121212121212121, "acc_norm_stderr": 0.03050193405942914 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8686868686868687, "acc_stderr": 0.024063156416822516, "acc_norm": 0.8686868686868687, "acc_norm_stderr": 0.024063156416822516 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.021995311963644244, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.021995311963644244 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563976, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563976 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37037037037037035, "acc_stderr": 0.02944316932303154, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.02944316932303154 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7142857142857143, "acc_stderr": 0.029344572500634332, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.029344572500634332 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.37748344370860926, "acc_stderr": 
0.03958027231121569, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 0.03958027231121569 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8458715596330275, "acc_stderr": 0.015480826865374308, "acc_norm": 0.8458715596330275, "acc_norm_stderr": 0.015480826865374308 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5787037037037037, "acc_stderr": 0.033674621388960775, "acc_norm": 0.5787037037037037, "acc_norm_stderr": 0.033674621388960775 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8578431372549019, "acc_stderr": 0.02450980392156862, "acc_norm": 0.8578431372549019, "acc_norm_stderr": 0.02450980392156862 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8481012658227848, "acc_stderr": 0.023363878096632446, "acc_norm": 0.8481012658227848, "acc_norm_stderr": 0.023363878096632446 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.672645739910314, "acc_stderr": 0.03149384670994131, "acc_norm": 0.672645739910314, "acc_norm_stderr": 0.03149384670994131 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7557251908396947, "acc_stderr": 0.03768335959728743, "acc_norm": 0.7557251908396947, "acc_norm_stderr": 0.03768335959728743 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228733, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.03492606476623791, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.03492606476623791 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.0230866350868414, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.0230866350868414 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8058748403575989, "acc_stderr": 0.014143970276657569, "acc_norm": 0.8058748403575989, "acc_norm_stderr": 0.014143970276657569 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7543352601156069, "acc_stderr": 0.023176298203992005, "acc_norm": 0.7543352601156069, "acc_norm_stderr": 0.023176298203992005 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.394413407821229, "acc_stderr": 0.01634538676210397, "acc_norm": 0.394413407821229, "acc_norm_stderr": 0.01634538676210397 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7581699346405228, "acc_stderr": 0.024518195641879334, "acc_norm": 0.7581699346405228, "acc_norm_stderr": 0.024518195641879334 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.729903536977492, "acc_stderr": 0.02521804037341062, "acc_norm": 0.729903536977492, "acc_norm_stderr": 0.02521804037341062 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7839506172839507, "acc_stderr": 0.022899162918445806, "acc_norm": 0.7839506172839507, "acc_norm_stderr": 0.022899162918445806 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.49645390070921985, "acc_stderr": 0.02982674915328092, "acc_norm": 0.49645390070921985, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.49282920469361147, "acc_stderr": 0.012768922739553311, "acc_norm": 0.49282920469361147, "acc_norm_stderr": 0.012768922739553311 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7389705882352942, "acc_stderr": 0.026679252270103128, "acc_norm": 0.7389705882352942, "acc_norm_stderr": 0.026679252270103128 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6781045751633987, "acc_stderr": 0.018901015322093092, "acc_norm": 0.6781045751633987, "acc_norm_stderr": 0.018901015322093092 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.028123429335142783, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.028123429335142783 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.9, "acc_stderr": 0.030151134457776334, "acc_norm": 0.9, "acc_norm_stderr": 0.030151134457776334 }, "harness|hendrycksTest-virology|5": { "acc": 0.5843373493975904, "acc_stderr": 0.03836722176598053, "acc_norm": 0.5843373493975904, "acc_norm_stderr": 0.03836722176598053 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03188578017686398, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03188578017686398 }, "harness|truthfulqa:mc|0": { "mc1": 0.5716034271725826, "mc1_stderr": 0.017323088597314747, "mc2": 0.7199912316059361, "mc2_stderr": 0.014982625070109003 }, "harness|winogrande|5": { "acc": 0.8326756116811366, "acc_stderr": 0.010490608806828075 }, "harness|gsm8k|5": { "acc": 0.6527672479150872, "acc_stderr": 0.013113898382146875 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
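The card above already shows how to load one configuration; as a complementary, hedged sketch, the per-task details for this run can also be read through the timestamped or "latest" splits listed in the metadata below. The config and split names come from that metadata, and no particular column schema is assumed:

```python
from datasets import load_dataset

# Repo id as given in the card above; config and split names are listed in the
# record's metadata ("harness_gsm8k_5" details, "latest" split).
gsm8k_details = load_dataset(
    "open-llm-leaderboard/details_cloudyu__Mixtral_11Bx2_MoE_19B",
    "harness_gsm8k_5",
    split="latest",
)

# Inspect the columns before relying on any particular field name.
print(gsm8k_details)
print(gsm8k_details.features)
```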
open-llm-leaderboard/details_cloudyu__Mixtral_11Bx2_MoE_19B
[ "region:us" ]
2024-01-04T13:42:41+00:00
{"pretty_name": "Evaluation run of cloudyu/Mixtral_11Bx2_MoE_19B", "dataset_summary": "Dataset automatically created during the evaluation run of model [cloudyu/Mixtral_11Bx2_MoE_19B](https://huggingface.co/cloudyu/Mixtral_11Bx2_MoE_19B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_cloudyu__Mixtral_11Bx2_MoE_19B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:40:23.223799](https://huggingface.co/datasets/open-llm-leaderboard/details_cloudyu__Mixtral_11Bx2_MoE_19B/blob/main/results_2024-01-04T13-40-23.223799.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6668773796285904,\n \"acc_stderr\": 0.03165405330888546,\n \"acc_norm\": 0.6676621047943123,\n \"acc_norm_stderr\": 0.03229875275948039,\n \"mc1\": 0.5716034271725826,\n \"mc1_stderr\": 0.017323088597314747,\n \"mc2\": 0.7199912316059361,\n \"mc2_stderr\": 0.014982625070109003\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6843003412969283,\n \"acc_stderr\": 0.013582571095815291,\n \"acc_norm\": 0.71160409556314,\n \"acc_norm_stderr\": 0.013238394422428173\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7140011949810795,\n \"acc_stderr\": 0.004509652679395676,\n \"acc_norm\": 0.8846843258315077,\n \"acc_norm_stderr\": 0.00318749750908742\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.756578947368421,\n \"acc_stderr\": 0.034923496688842384,\n \"acc_norm\": 0.756578947368421,\n \"acc_norm_stderr\": 0.034923496688842384\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.02863723563980089,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.02863723563980089\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n 
\"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956913,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956913\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.04858083574266346,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.04858083574266346\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768077,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768077\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.625531914893617,\n \"acc_stderr\": 0.03163910665367291,\n \"acc_norm\": 0.625531914893617,\n \"acc_norm_stderr\": 0.03163910665367291\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6344827586206897,\n \"acc_stderr\": 0.040131241954243856,\n \"acc_norm\": 0.6344827586206897,\n \"acc_norm_stderr\": 0.040131241954243856\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4973544973544973,\n \"acc_stderr\": 0.02575094967813039,\n \"acc_norm\": 0.4973544973544973,\n \"acc_norm_stderr\": 0.02575094967813039\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8193548387096774,\n \"acc_stderr\": 0.021886178567172534,\n \"acc_norm\": 0.8193548387096774,\n \"acc_norm_stderr\": 0.021886178567172534\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8121212121212121,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.8121212121212121,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8686868686868687,\n \"acc_stderr\": 0.024063156416822516,\n \"acc_norm\": 0.8686868686868687,\n \"acc_norm_stderr\": 0.024063156416822516\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.021995311963644244,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.021995311963644244\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n 
\"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.023946724741563976,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563976\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37037037037037035,\n \"acc_stderr\": 0.02944316932303154,\n \"acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.02944316932303154\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.029344572500634332,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.029344572500634332\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.03958027231121569,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.03958027231121569\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8458715596330275,\n \"acc_stderr\": 0.015480826865374308,\n \"acc_norm\": 0.8458715596330275,\n \"acc_norm_stderr\": 0.015480826865374308\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5787037037037037,\n \"acc_stderr\": 0.033674621388960775,\n \"acc_norm\": 0.5787037037037037,\n \"acc_norm_stderr\": 0.033674621388960775\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8578431372549019,\n \"acc_stderr\": 0.02450980392156862,\n \"acc_norm\": 0.8578431372549019,\n \"acc_norm_stderr\": 0.02450980392156862\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8481012658227848,\n \"acc_stderr\": 0.023363878096632446,\n \"acc_norm\": 0.8481012658227848,\n \"acc_norm_stderr\": 0.023363878096632446\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.672645739910314,\n \"acc_stderr\": 0.03149384670994131,\n \"acc_norm\": 0.672645739910314,\n \"acc_norm_stderr\": 0.03149384670994131\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7557251908396947,\n \"acc_stderr\": 0.03768335959728743,\n \"acc_norm\": 0.7557251908396947,\n \"acc_norm_stderr\": 0.03768335959728743\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.03492606476623791,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.03492606476623791\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.0230866350868414,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.0230866350868414\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8058748403575989,\n \"acc_stderr\": 0.014143970276657569,\n 
\"acc_norm\": 0.8058748403575989,\n \"acc_norm_stderr\": 0.014143970276657569\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7543352601156069,\n \"acc_stderr\": 0.023176298203992005,\n \"acc_norm\": 0.7543352601156069,\n \"acc_norm_stderr\": 0.023176298203992005\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.394413407821229,\n \"acc_stderr\": 0.01634538676210397,\n \"acc_norm\": 0.394413407821229,\n \"acc_norm_stderr\": 0.01634538676210397\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7581699346405228,\n \"acc_stderr\": 0.024518195641879334,\n \"acc_norm\": 0.7581699346405228,\n \"acc_norm_stderr\": 0.024518195641879334\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.729903536977492,\n \"acc_stderr\": 0.02521804037341062,\n \"acc_norm\": 0.729903536977492,\n \"acc_norm_stderr\": 0.02521804037341062\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7839506172839507,\n \"acc_stderr\": 0.022899162918445806,\n \"acc_norm\": 0.7839506172839507,\n \"acc_norm_stderr\": 0.022899162918445806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.49645390070921985,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.49645390070921985,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.49282920469361147,\n \"acc_stderr\": 0.012768922739553311,\n \"acc_norm\": 0.49282920469361147,\n \"acc_norm_stderr\": 0.012768922739553311\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7389705882352942,\n \"acc_stderr\": 0.026679252270103128,\n \"acc_norm\": 0.7389705882352942,\n \"acc_norm_stderr\": 0.026679252270103128\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6781045751633987,\n \"acc_stderr\": 0.018901015322093092,\n \"acc_norm\": 0.6781045751633987,\n \"acc_norm_stderr\": 0.018901015322093092\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142783,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142783\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.9,\n \"acc_stderr\": 0.030151134457776334,\n \"acc_norm\": 0.9,\n \"acc_norm_stderr\": 0.030151134457776334\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5843373493975904,\n \"acc_stderr\": 0.03836722176598053,\n \"acc_norm\": 0.5843373493975904,\n \"acc_norm_stderr\": 0.03836722176598053\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03188578017686398,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03188578017686398\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5716034271725826,\n \"mc1_stderr\": 0.017323088597314747,\n \"mc2\": 0.7199912316059361,\n \"mc2_stderr\": 0.014982625070109003\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8326756116811366,\n \"acc_stderr\": 0.010490608806828075\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6527672479150872,\n \"acc_stderr\": 0.013113898382146875\n }\n}\n```", "repo_url": 
"https://huggingface.co/cloudyu/Mixtral_11Bx2_MoE_19B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-40-23.223799.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-40-23.223799.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-40-23.223799.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-40-23.223799.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-40-23.223799.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_40_23.223799", "path": ["**/details_harness|winogrande|5_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T13-40-23.223799.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T13_40_23.223799", "path": ["results_2024-01-04T13-40-23.223799.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T13-40-23.223799.parquet"]}]}]}
2024-01-04T13:43:04+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of cloudyu/Mixtral_11Bx2_MoE_19B Dataset automatically created during the evaluation run of model cloudyu/Mixtral_11Bx2_MoE_19B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T13:40:23.223799 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of cloudyu/Mixtral_11Bx2_MoE_19B\n\n\n\nDataset automatically created during the evaluation run of model cloudyu/Mixtral_11Bx2_MoE_19B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:40:23.223799(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of cloudyu/Mixtral_11Bx2_MoE_19B\n\n\n\nDataset automatically created during the evaluation run of model cloudyu/Mixtral_11Bx2_MoE_19B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:40:23.223799(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of cloudyu/Mixtral_11Bx2_MoE_19B\n\n\n\nDataset automatically created during the evaluation run of model cloudyu/Mixtral_11Bx2_MoE_19B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:40:23.223799(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
0a968adde995c47de0e8a0a8f6778386fe861712
# Dataset Card for Evaluation run of logicker/SkkuDataScienceGlobal-10.7b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [logicker/SkkuDataScienceGlobal-10.7b](https://huggingface.co/logicker/SkkuDataScienceGlobal-10.7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_logicker__SkkuDataScienceGlobal-10.7b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:43:44.357190](https://huggingface.co/datasets/open-llm-leaderboard/details_logicker__SkkuDataScienceGlobal-10.7b/blob/main/results_2024-01-04T13-43-44.357190.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6670223395120712, "acc_stderr": 0.031613933491978684, "acc_norm": 0.6677265646501975, "acc_norm_stderr": 0.03225832144808963, "mc1": 0.5716034271725826, "mc1_stderr": 0.017323088597314747, "mc2": 0.7192487275907953, "mc2_stderr": 0.015001105564856102 }, "harness|arc:challenge|25": { "acc": 0.6851535836177475, "acc_stderr": 0.01357265770308495, "acc_norm": 0.712457337883959, "acc_norm_stderr": 0.013226719056266125 }, "harness|hellaswag|10": { "acc": 0.7131049591714798, "acc_stderr": 0.004513877465062106, "acc_norm": 0.8840868352917746, "acc_norm_stderr": 0.003194665266078602 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.756578947368421, "acc_stderr": 0.034923496688842384, "acc_norm": 0.756578947368421, "acc_norm_stderr": 0.034923496688842384 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.74, "acc_stderr": 0.0440844002276808, "acc_norm": 0.74, "acc_norm_stderr": 0.0440844002276808 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.02863723563980089, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.02863723563980089 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956913, "acc_norm": 0.51,
"acc_norm_stderr": 0.05024183937956913 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.37254901960784315, "acc_stderr": 0.04810840148082636, "acc_norm": 0.37254901960784315, "acc_norm_stderr": 0.04810840148082636 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.625531914893617, "acc_stderr": 0.03163910665367291, "acc_norm": 0.625531914893617, "acc_norm_stderr": 0.03163910665367291 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6344827586206897, "acc_stderr": 0.040131241954243856, "acc_norm": 0.6344827586206897, "acc_norm_stderr": 0.040131241954243856 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4947089947089947, "acc_stderr": 0.02574986828855657, "acc_norm": 0.4947089947089947, "acc_norm_stderr": 0.02574986828855657 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8161290322580645, "acc_stderr": 0.022037217340267822, "acc_norm": 0.8161290322580645, "acc_norm_stderr": 0.022037217340267822 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.03517945038691063, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.806060606060606, "acc_stderr": 0.03087414513656209, "acc_norm": 0.806060606060606, "acc_norm_stderr": 0.03087414513656209 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8686868686868687, "acc_stderr": 0.024063156416822516, "acc_norm": 0.8686868686868687, "acc_norm_stderr": 0.024063156416822516 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603348, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603348 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563976, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563976 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37037037037037035, "acc_stderr": 0.02944316932303154, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.02944316932303154 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7184873949579832, "acc_stderr": 0.02921354941437217, "acc_norm": 0.7184873949579832, "acc_norm_stderr": 0.02921354941437217 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5833333333333334, "acc_stderr": 0.033622774366080424, "acc_norm": 0.5833333333333334, "acc_norm_stderr": 0.033622774366080424 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8578431372549019, "acc_stderr": 0.02450980392156862, "acc_norm": 0.8578431372549019, "acc_norm_stderr": 0.02450980392156862 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8481012658227848, "acc_stderr": 0.023363878096632446, "acc_norm": 0.8481012658227848, "acc_norm_stderr": 0.023363878096632446 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6816143497757847, "acc_stderr": 0.03126580522513713, "acc_norm": 0.6816143497757847, "acc_norm_stderr": 0.03126580522513713 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7480916030534351, "acc_stderr": 0.03807387116306086, "acc_norm": 0.7480916030534351, "acc_norm_stderr": 0.03807387116306086 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7768595041322314, "acc_stderr": 0.03800754475228733, "acc_norm": 0.7768595041322314, "acc_norm_stderr": 0.03800754475228733 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.033519538795212696, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.033519538795212696 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.03492606476623791, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.03492606476623791 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.0230866350868414, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.0230866350868414 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8045977011494253, "acc_stderr": 0.014179171373424383, "acc_norm": 0.8045977011494253, "acc_norm_stderr": 0.014179171373424383 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7601156069364162, "acc_stderr": 0.022989592543123563, "acc_norm": 0.7601156069364162, "acc_norm_stderr": 0.022989592543123563 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.394413407821229, "acc_stderr": 0.01634538676210397, "acc_norm": 0.394413407821229, "acc_norm_stderr": 0.01634538676210397 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7581699346405228, "acc_stderr": 0.024518195641879334, "acc_norm": 0.7581699346405228, "acc_norm_stderr": 0.024518195641879334 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.729903536977492, "acc_stderr": 0.02521804037341062, "acc_norm": 0.729903536977492, "acc_norm_stderr": 0.02521804037341062 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0227797190887334, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0227797190887334 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4929078014184397, "acc_stderr": 0.02982449855912901, "acc_norm": 0.4929078014184397, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4954367666232073, "acc_stderr": 0.012769704263117522, "acc_norm": 0.4954367666232073, "acc_norm_stderr": 0.012769704263117522 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7389705882352942, "acc_stderr": 0.026679252270103128, "acc_norm": 0.7389705882352942, "acc_norm_stderr": 0.026679252270103128 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6813725490196079, "acc_stderr": 0.018850084696468712, "acc_norm": 0.6813725490196079, "acc_norm_stderr": 0.018850084696468712 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7387755102040816, "acc_stderr": 0.028123429335142783, "acc_norm": 0.7387755102040816, "acc_norm_stderr": 0.028123429335142783 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8308457711442786, "acc_stderr": 0.02650859065623327, "acc_norm": 0.8308457711442786, "acc_norm_stderr": 0.02650859065623327 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.91, "acc_stderr": 0.028762349126466125, "acc_norm": 0.91, "acc_norm_stderr": 0.028762349126466125 }, "harness|hendrycksTest-virology|5": { "acc": 0.5843373493975904, "acc_stderr": 0.03836722176598053, "acc_norm": 0.5843373493975904, "acc_norm_stderr": 0.03836722176598053 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03188578017686398, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03188578017686398 }, "harness|truthfulqa:mc|0": { "mc1": 0.5716034271725826, "mc1_stderr": 0.017323088597314747, "mc2": 0.7192487275907953, "mc2_stderr": 0.015001105564856102 }, "harness|winogrande|5": { "acc": 0.8334648776637726, "acc_stderr": 0.010470796496781093 }, "harness|gsm8k|5": { "acc": 0.6573161485974223, "acc_stderr": 0.013073030230827915 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
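A minimal sketch of pulling the aggregated metrics that the card above attributes to the "results" configuration; the "latest" split is assumed to be available for it, as it is for the per-task configurations:

```python
from datasets import load_dataset

# The "results" configuration aggregates all task metrics for a run;
# the "latest" split points at the most recent evaluation timestamp.
results = load_dataset(
    "open-llm-leaderboard/details_logicker__SkkuDataScienceGlobal-10.7b",
    "results",
    split="latest",
)
print(results[0])  # aggregated metrics row for the latest run
```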
open-llm-leaderboard/details_logicker__SkkuDataScienceGlobal-10.7b
[ "region:us" ]
2024-01-04T13:46:00+00:00
{"pretty_name": "Evaluation run of logicker/SkkuDataScienceGlobal-10.7b", "dataset_summary": "Dataset automatically created during the evaluation run of model [logicker/SkkuDataScienceGlobal-10.7b](https://huggingface.co/logicker/SkkuDataScienceGlobal-10.7b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_logicker__SkkuDataScienceGlobal-10.7b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:43:44.357190](https://huggingface.co/datasets/open-llm-leaderboard/details_logicker__SkkuDataScienceGlobal-10.7b/blob/main/results_2024-01-04T13-43-44.357190.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6670223395120712,\n \"acc_stderr\": 0.031613933491978684,\n \"acc_norm\": 0.6677265646501975,\n \"acc_norm_stderr\": 0.03225832144808963,\n \"mc1\": 0.5716034271725826,\n \"mc1_stderr\": 0.017323088597314747,\n \"mc2\": 0.7192487275907953,\n \"mc2_stderr\": 0.015001105564856102\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6851535836177475,\n \"acc_stderr\": 0.01357265770308495,\n \"acc_norm\": 0.712457337883959,\n \"acc_norm_stderr\": 0.013226719056266125\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7131049591714798,\n \"acc_stderr\": 0.004513877465062106,\n \"acc_norm\": 0.8840868352917746,\n \"acc_norm_stderr\": 0.003194665266078602\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.756578947368421,\n \"acc_stderr\": 0.034923496688842384,\n \"acc_norm\": 0.756578947368421,\n \"acc_norm_stderr\": 0.034923496688842384\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.02863723563980089,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.02863723563980089\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956913,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956913\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.37254901960784315,\n \"acc_stderr\": 0.04810840148082636,\n \"acc_norm\": 0.37254901960784315,\n \"acc_norm_stderr\": 0.04810840148082636\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.625531914893617,\n \"acc_stderr\": 0.03163910665367291,\n \"acc_norm\": 0.625531914893617,\n \"acc_norm_stderr\": 0.03163910665367291\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6344827586206897,\n \"acc_stderr\": 0.040131241954243856,\n \"acc_norm\": 0.6344827586206897,\n \"acc_norm_stderr\": 0.040131241954243856\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4947089947089947,\n \"acc_stderr\": 0.02574986828855657,\n \"acc_norm\": 0.4947089947089947,\n \"acc_norm_stderr\": 0.02574986828855657\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8161290322580645,\n \"acc_stderr\": 0.022037217340267822,\n \"acc_norm\": 0.8161290322580645,\n \"acc_norm_stderr\": 0.022037217340267822\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.806060606060606,\n \"acc_stderr\": 0.03087414513656209,\n \"acc_norm\": 0.806060606060606,\n \"acc_norm_stderr\": 0.03087414513656209\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8686868686868687,\n \"acc_stderr\": 0.024063156416822516,\n \"acc_norm\": 0.8686868686868687,\n \"acc_norm_stderr\": 0.024063156416822516\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603348,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 
0.02150024957603348\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.023946724741563976,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563976\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37037037037037035,\n \"acc_stderr\": 0.02944316932303154,\n \"acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.02944316932303154\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7184873949579832,\n \"acc_stderr\": 0.02921354941437217,\n \"acc_norm\": 0.7184873949579832,\n \"acc_norm_stderr\": 0.02921354941437217\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5833333333333334,\n \"acc_stderr\": 0.033622774366080424,\n \"acc_norm\": 0.5833333333333334,\n \"acc_norm_stderr\": 0.033622774366080424\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8578431372549019,\n \"acc_stderr\": 0.02450980392156862,\n \"acc_norm\": 0.8578431372549019,\n \"acc_norm_stderr\": 0.02450980392156862\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8481012658227848,\n \"acc_stderr\": 0.023363878096632446,\n \"acc_norm\": 0.8481012658227848,\n \"acc_norm_stderr\": 0.023363878096632446\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6816143497757847,\n \"acc_stderr\": 0.03126580522513713,\n \"acc_norm\": 0.6816143497757847,\n \"acc_norm_stderr\": 0.03126580522513713\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306086,\n \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306086\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7768595041322314,\n \"acc_stderr\": 0.03800754475228733,\n \"acc_norm\": 0.7768595041322314,\n \"acc_norm_stderr\": 0.03800754475228733\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.033519538795212696,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.033519538795212696\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.03492606476623791,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.03492606476623791\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.0230866350868414,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.0230866350868414\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8045977011494253,\n \"acc_stderr\": 0.014179171373424383,\n \"acc_norm\": 0.8045977011494253,\n \"acc_norm_stderr\": 0.014179171373424383\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7601156069364162,\n \"acc_stderr\": 0.022989592543123563,\n \"acc_norm\": 0.7601156069364162,\n \"acc_norm_stderr\": 0.022989592543123563\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.394413407821229,\n \"acc_stderr\": 0.01634538676210397,\n \"acc_norm\": 0.394413407821229,\n \"acc_norm_stderr\": 0.01634538676210397\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7581699346405228,\n \"acc_stderr\": 0.024518195641879334,\n \"acc_norm\": 0.7581699346405228,\n \"acc_norm_stderr\": 0.024518195641879334\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.729903536977492,\n \"acc_stderr\": 0.02521804037341062,\n \"acc_norm\": 0.729903536977492,\n \"acc_norm_stderr\": 0.02521804037341062\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0227797190887334,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0227797190887334\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4954367666232073,\n \"acc_stderr\": 0.012769704263117522,\n \"acc_norm\": 0.4954367666232073,\n \"acc_norm_stderr\": 0.012769704263117522\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7389705882352942,\n \"acc_stderr\": 0.026679252270103128,\n \"acc_norm\": 0.7389705882352942,\n \"acc_norm_stderr\": 0.026679252270103128\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6813725490196079,\n \"acc_stderr\": 0.018850084696468712,\n \"acc_norm\": 0.6813725490196079,\n \"acc_norm_stderr\": 0.018850084696468712\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7387755102040816,\n \"acc_stderr\": 0.028123429335142783,\n \"acc_norm\": 0.7387755102040816,\n \"acc_norm_stderr\": 0.028123429335142783\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8308457711442786,\n \"acc_stderr\": 0.02650859065623327,\n \"acc_norm\": 0.8308457711442786,\n \"acc_norm_stderr\": 0.02650859065623327\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.028762349126466125,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.028762349126466125\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5843373493975904,\n \"acc_stderr\": 0.03836722176598053,\n \"acc_norm\": 0.5843373493975904,\n \"acc_norm_stderr\": 0.03836722176598053\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03188578017686398,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03188578017686398\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5716034271725826,\n \"mc1_stderr\": 0.017323088597314747,\n \"mc2\": 0.7192487275907953,\n \"mc2_stderr\": 0.015001105564856102\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8334648776637726,\n \"acc_stderr\": 0.010470796496781093\n },\n \"harness|gsm8k|5\": {\n \"acc\": 
0.6573161485974223,\n \"acc_stderr\": 0.013073030230827915\n }\n}\n```", "repo_url": "https://huggingface.co/logicker/SkkuDataScienceGlobal-10.7b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-43-44.357190.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-43-44.357190.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-43-44.357190.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-43-44.357190.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-43-44.357190.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["**/details_harness|winogrande|5_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-04T13-43-44.357190.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T13_43_44.357190", "path": ["results_2024-01-04T13-43-44.357190.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T13-43-44.357190.parquet"]}]}]}
2024-01-04T13:46:22+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of logicker/SkkuDataScienceGlobal-10.7b Dataset automatically created during the evaluation run of model logicker/SkkuDataScienceGlobal-10.7b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (a minimal sketch is given just after this card text): ## Latest results These are the latest results from run 2024-01-04T13:43:44.357190 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
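The loading step referenced in the card above can be sketched as follows. The config name `harness_winogrande_5` is taken from the configuration list earlier in this record; the repository id is an assumption based on the leaderboard's usual `details_<org>__<model>` naming pattern and should be verified against the actual repo.

```python
from datasets import load_dataset

# Assumed repo id (standard Open LLM Leaderboard "details" naming); verify before use.
data = load_dataset(
    "open-llm-leaderboard/details_logicker__SkkuDataScienceGlobal-10.7b",
    "harness_winogrande_5",   # one of the 63 per-task configurations listed above
    split="train",            # "train" always points at the latest results
)
print(data)
```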
[ "# Dataset Card for Evaluation run of logicker/SkkuDataScienceGlobal-10.7b\n\n\n\nDataset automatically created during the evaluation run of model logicker/SkkuDataScienceGlobal-10.7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:43:44.357190(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of logicker/SkkuDataScienceGlobal-10.7b\n\n\n\nDataset automatically created during the evaluation run of model logicker/SkkuDataScienceGlobal-10.7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:43:44.357190(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 185, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of logicker/SkkuDataScienceGlobal-10.7b\n\n\n\nDataset automatically created during the evaluation run of model logicker/SkkuDataScienceGlobal-10.7b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:43:44.357190(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
5a6dabaeced23d47866541fc881b1807700f18ec
# Dataset Card for Evaluation run of NousResearch/Nous-Hermes-2-SOLAR-10.7B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [NousResearch/Nous-Hermes-2-SOLAR-10.7B](https://huggingface.co/NousResearch/Nous-Hermes-2-SOLAR-10.7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_NousResearch__Nous-Hermes-2-SOLAR-10.7B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:44:46.879799](https://huggingface.co/datasets/open-llm-leaderboard/details_NousResearch__Nous-Hermes-2-SOLAR-10.7B/blob/main/results_2024-01-04T13-44-46.879799.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6655569621891528, "acc_stderr": 0.0315088972531857, "acc_norm": 0.6661789008110104, "acc_norm_stderr": 0.03215679079738553, "mc1": 0.3929008567931457, "mc1_stderr": 0.017097248285233065, "mc2": 0.5582372806316004, "mc2_stderr": 0.015330920960330282 }, "harness|arc:challenge|25": { "acc": 0.6305460750853242, "acc_stderr": 0.014104578366491892, "acc_norm": 0.6672354948805461, "acc_norm_stderr": 0.013769863046192304 }, "harness|hellaswag|10": { "acc": 0.6576379207329217, "acc_stderr": 0.004735302937476554, "acc_norm": 0.8489344752041426, "acc_norm_stderr": 0.0035738085511685365 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.04793724854411021, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411021 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.562962962962963, "acc_stderr": 0.04284958639753401, "acc_norm": 0.562962962962963, "acc_norm_stderr": 0.04284958639753401 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7631578947368421, "acc_stderr": 0.03459777606810535, "acc_norm": 0.7631578947368421, "acc_norm_stderr": 0.03459777606810535 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.74, "acc_stderr": 0.044084400227680794, "acc_norm": 0.74, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.690566037735849, "acc_stderr": 0.02845015479411864, "acc_norm": 0.690566037735849, "acc_norm_stderr": 0.02845015479411864 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7291666666666666, "acc_stderr": 0.03716177437566018, "acc_norm": 0.7291666666666666, "acc_norm_stderr": 0.03716177437566018 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, 
"acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.04852365870939098, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939098 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6416184971098265, "acc_stderr": 0.03656343653353159, "acc_norm": 0.6416184971098265, "acc_norm_stderr": 0.03656343653353159 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5957446808510638, "acc_stderr": 0.03208115750788684, "acc_norm": 0.5957446808510638, "acc_norm_stderr": 0.03208115750788684 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.543859649122807, "acc_stderr": 0.04685473041907789, "acc_norm": 0.543859649122807, "acc_norm_stderr": 0.04685473041907789 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5793103448275863, "acc_stderr": 0.0411391498118926, "acc_norm": 0.5793103448275863, "acc_norm_stderr": 0.0411391498118926 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4894179894179894, "acc_stderr": 0.025745542276045478, "acc_norm": 0.4894179894179894, "acc_norm_stderr": 0.025745542276045478 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621505, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8064516129032258, "acc_stderr": 0.022475258525536057, "acc_norm": 0.8064516129032258, "acc_norm_stderr": 0.022475258525536057 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5172413793103449, "acc_stderr": 0.03515895551165698, "acc_norm": 0.5172413793103449, "acc_norm_stderr": 0.03515895551165698 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8303030303030303, "acc_stderr": 0.02931118867498311, "acc_norm": 0.8303030303030303, "acc_norm_stderr": 0.02931118867498311 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8787878787878788, "acc_stderr": 0.023253157951942067, "acc_norm": 0.8787878787878788, "acc_norm_stderr": 0.023253157951942067 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6743589743589744, "acc_stderr": 0.02375966576741229, "acc_norm": 0.6743589743589744, "acc_norm_stderr": 0.02375966576741229 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.362962962962963, "acc_stderr": 0.02931820364520686, "acc_norm": 0.362962962962963, "acc_norm_stderr": 0.02931820364520686 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6932773109243697, "acc_stderr": 0.02995382389188704, "acc_norm": 0.6932773109243697, "acc_norm_stderr": 0.02995382389188704 }, "harness|hendrycksTest-high_school_physics|5": { 
"acc": 0.37748344370860926, "acc_stderr": 0.0395802723112157, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 0.0395802723112157 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8513761467889909, "acc_stderr": 0.015251253773660836, "acc_norm": 0.8513761467889909, "acc_norm_stderr": 0.015251253773660836 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5277777777777778, "acc_stderr": 0.0340470532865388, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.025845017986926913, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.025845017986926913 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8776371308016878, "acc_stderr": 0.021331741829746786, "acc_norm": 0.8776371308016878, "acc_norm_stderr": 0.021331741829746786 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7488789237668162, "acc_stderr": 0.029105220833224622, "acc_norm": 0.7488789237668162, "acc_norm_stderr": 0.029105220833224622 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.03641297081313729, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.03641297081313729 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8181818181818182, "acc_stderr": 0.03520893951097653, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.03520893951097653 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.0395783547198098, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.0395783547198098 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5089285714285714, "acc_stderr": 0.04745033255489122, "acc_norm": 0.5089285714285714, "acc_norm_stderr": 0.04745033255489122 }, "harness|hendrycksTest-management|5": { "acc": 0.8058252427184466, "acc_stderr": 0.03916667762822584, "acc_norm": 0.8058252427184466, "acc_norm_stderr": 0.03916667762822584 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406964, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406964 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8275862068965517, "acc_stderr": 0.013507943909371812, "acc_norm": 0.8275862068965517, "acc_norm_stderr": 0.013507943909371812 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3486033519553073, "acc_stderr": 0.01593748465668703, "acc_norm": 0.3486033519553073, "acc_norm_stderr": 0.01593748465668703 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7777777777777778, "acc_stderr": 0.02380518652488813, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.02380518652488813 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7266881028938906, "acc_stderr": 0.02531176597542612, "acc_norm": 0.7266881028938906, "acc_norm_stderr": 0.02531176597542612 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7746913580246914, "acc_stderr": 0.02324620264781975, "acc_norm": 0.7746913580246914, "acc_norm_stderr": 0.02324620264781975 
}, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5177304964539007, "acc_stderr": 0.02980873964223777, "acc_norm": 0.5177304964539007, "acc_norm_stderr": 0.02980873964223777 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.500651890482399, "acc_stderr": 0.012770225252255563, "acc_norm": 0.500651890482399, "acc_norm_stderr": 0.012770225252255563 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7573529411764706, "acc_stderr": 0.026040662474201257, "acc_norm": 0.7573529411764706, "acc_norm_stderr": 0.026040662474201257 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.684640522875817, "acc_stderr": 0.018798086284886887, "acc_norm": 0.684640522875817, "acc_norm_stderr": 0.018798086284886887 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7, "acc_stderr": 0.04389311454644287, "acc_norm": 0.7, "acc_norm_stderr": 0.04389311454644287 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7877551020408163, "acc_stderr": 0.026176967197866764, "acc_norm": 0.7877551020408163, "acc_norm_stderr": 0.026176967197866764 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421603, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421603 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.91, "acc_stderr": 0.028762349126466108, "acc_norm": 0.91, "acc_norm_stderr": 0.028762349126466108 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699122, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699122 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.3929008567931457, "mc1_stderr": 0.017097248285233065, "mc2": 0.5582372806316004, "mc2_stderr": 0.015330920960330282 }, "harness|winogrande|5": { "acc": 0.8279400157853196, "acc_stderr": 0.010607731615246996 }, "harness|gsm8k|5": { "acc": 0.6944655041698257, "acc_stderr": 0.01268813407672688 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
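The card body above describes an aggregated "results" configuration whose "latest" split always points at the most recent evaluation; since the card's own loading snippet only covers a single task configuration, a complementary sketch for pulling those aggregated metrics is shown below. The repo name, the `results` config name, and the `latest` split are all taken from the card and its configuration list; the printed structure is inspected rather than assumed, because the exact column names are not spelled out in the source.

```python
from datasets import load_dataset

# Aggregated run-level metrics, stored in the "results" configuration of the details repo.
# The "latest" split always points to the most recent evaluation timestamp.
results = load_dataset(
    "open-llm-leaderboard/details_NousResearch__Nous-Hermes-2-SOLAR-10.7B",
    "results",
    split="latest",
)

# Inspect the available columns before relying on any particular field.
print(results)
```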
open-llm-leaderboard/details_NousResearch__Nous-Hermes-2-SOLAR-10.7B
[ "region:us" ]
2024-01-04T13:47:05+00:00
{"pretty_name": "Evaluation run of NousResearch/Nous-Hermes-2-SOLAR-10.7B", "dataset_summary": "Dataset automatically created during the evaluation run of model [NousResearch/Nous-Hermes-2-SOLAR-10.7B](https://huggingface.co/NousResearch/Nous-Hermes-2-SOLAR-10.7B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NousResearch__Nous-Hermes-2-SOLAR-10.7B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:44:46.879799](https://huggingface.co/datasets/open-llm-leaderboard/details_NousResearch__Nous-Hermes-2-SOLAR-10.7B/blob/main/results_2024-01-04T13-44-46.879799.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6655569621891528,\n \"acc_stderr\": 0.0315088972531857,\n \"acc_norm\": 0.6661789008110104,\n \"acc_norm_stderr\": 0.03215679079738553,\n \"mc1\": 0.3929008567931457,\n \"mc1_stderr\": 0.017097248285233065,\n \"mc2\": 0.5582372806316004,\n \"mc2_stderr\": 0.015330920960330282\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6305460750853242,\n \"acc_stderr\": 0.014104578366491892,\n \"acc_norm\": 0.6672354948805461,\n \"acc_norm_stderr\": 0.013769863046192304\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6576379207329217,\n \"acc_stderr\": 0.004735302937476554,\n \"acc_norm\": 0.8489344752041426,\n \"acc_norm_stderr\": 0.0035738085511685365\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411021,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411021\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.562962962962963,\n \"acc_stderr\": 0.04284958639753401,\n \"acc_norm\": 0.562962962962963,\n \"acc_norm_stderr\": 0.04284958639753401\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7631578947368421,\n \"acc_stderr\": 0.03459777606810535,\n \"acc_norm\": 0.7631578947368421,\n \"acc_norm_stderr\": 0.03459777606810535\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.690566037735849,\n \"acc_stderr\": 0.02845015479411864,\n \"acc_norm\": 0.690566037735849,\n \"acc_norm_stderr\": 0.02845015479411864\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7291666666666666,\n \"acc_stderr\": 0.03716177437566018,\n \"acc_norm\": 0.7291666666666666,\n \"acc_norm_stderr\": 0.03716177437566018\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939098,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939098\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6416184971098265,\n \"acc_stderr\": 0.03656343653353159,\n \"acc_norm\": 0.6416184971098265,\n \"acc_norm_stderr\": 0.03656343653353159\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5957446808510638,\n \"acc_stderr\": 0.03208115750788684,\n \"acc_norm\": 0.5957446808510638,\n \"acc_norm_stderr\": 0.03208115750788684\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.543859649122807,\n \"acc_stderr\": 0.04685473041907789,\n \"acc_norm\": 0.543859649122807,\n \"acc_norm_stderr\": 0.04685473041907789\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5793103448275863,\n \"acc_stderr\": 0.0411391498118926,\n \"acc_norm\": 0.5793103448275863,\n \"acc_norm_stderr\": 0.0411391498118926\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4894179894179894,\n \"acc_stderr\": 0.025745542276045478,\n \"acc_norm\": 0.4894179894179894,\n \"acc_norm_stderr\": 0.025745542276045478\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8064516129032258,\n \"acc_stderr\": 0.022475258525536057,\n \"acc_norm\": 0.8064516129032258,\n \"acc_norm_stderr\": 0.022475258525536057\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5172413793103449,\n \"acc_stderr\": 0.03515895551165698,\n \"acc_norm\": 0.5172413793103449,\n \"acc_norm_stderr\": 0.03515895551165698\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8303030303030303,\n \"acc_stderr\": 0.02931118867498311,\n \"acc_norm\": 0.8303030303030303,\n \"acc_norm_stderr\": 0.02931118867498311\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8787878787878788,\n \"acc_stderr\": 0.023253157951942067,\n \"acc_norm\": 0.8787878787878788,\n \"acc_norm_stderr\": 0.023253157951942067\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n 
\"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6743589743589744,\n \"acc_stderr\": 0.02375966576741229,\n \"acc_norm\": 0.6743589743589744,\n \"acc_norm_stderr\": 0.02375966576741229\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.362962962962963,\n \"acc_stderr\": 0.02931820364520686,\n \"acc_norm\": 0.362962962962963,\n \"acc_norm_stderr\": 0.02931820364520686\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6932773109243697,\n \"acc_stderr\": 0.02995382389188704,\n \"acc_norm\": 0.6932773109243697,\n \"acc_norm_stderr\": 0.02995382389188704\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.0395802723112157,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.0395802723112157\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8513761467889909,\n \"acc_stderr\": 0.015251253773660836,\n \"acc_norm\": 0.8513761467889909,\n \"acc_norm_stderr\": 0.015251253773660836\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5277777777777778,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.5277777777777778,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.025845017986926913,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.025845017986926913\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8776371308016878,\n \"acc_stderr\": 0.021331741829746786,\n \"acc_norm\": 0.8776371308016878,\n \"acc_norm_stderr\": 0.021331741829746786\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7488789237668162,\n \"acc_stderr\": 0.029105220833224622,\n \"acc_norm\": 0.7488789237668162,\n \"acc_norm_stderr\": 0.029105220833224622\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.03641297081313729,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.03641297081313729\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.03520893951097653,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.03520893951097653\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.0395783547198098,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.0395783547198098\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n \"acc_stderr\": 0.04745033255489122,\n \"acc_norm\": 0.5089285714285714,\n \"acc_norm_stderr\": 0.04745033255489122\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8058252427184466,\n \"acc_stderr\": 0.03916667762822584,\n \"acc_norm\": 0.8058252427184466,\n \"acc_norm_stderr\": 0.03916667762822584\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406964,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406964\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8275862068965517,\n \"acc_stderr\": 0.013507943909371812,\n \"acc_norm\": 0.8275862068965517,\n \"acc_norm_stderr\": 0.013507943909371812\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3486033519553073,\n \"acc_stderr\": 0.01593748465668703,\n \"acc_norm\": 0.3486033519553073,\n \"acc_norm_stderr\": 0.01593748465668703\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.02380518652488813,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.02380518652488813\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7266881028938906,\n \"acc_stderr\": 0.02531176597542612,\n \"acc_norm\": 0.7266881028938906,\n \"acc_norm_stderr\": 0.02531176597542612\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7746913580246914,\n \"acc_stderr\": 0.02324620264781975,\n \"acc_norm\": 0.7746913580246914,\n \"acc_norm_stderr\": 0.02324620264781975\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5177304964539007,\n \"acc_stderr\": 0.02980873964223777,\n \"acc_norm\": 0.5177304964539007,\n \"acc_norm_stderr\": 0.02980873964223777\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.500651890482399,\n \"acc_stderr\": 0.012770225252255563,\n \"acc_norm\": 0.500651890482399,\n \"acc_norm_stderr\": 0.012770225252255563\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7573529411764706,\n \"acc_stderr\": 0.026040662474201257,\n \"acc_norm\": 0.7573529411764706,\n \"acc_norm_stderr\": 0.026040662474201257\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.684640522875817,\n \"acc_stderr\": 0.018798086284886887,\n \"acc_norm\": 0.684640522875817,\n \"acc_norm_stderr\": 0.018798086284886887\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.04389311454644287,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.04389311454644287\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7877551020408163,\n \"acc_stderr\": 0.026176967197866764,\n \"acc_norm\": 0.7877551020408163,\n \"acc_norm_stderr\": 0.026176967197866764\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.028762349126466108,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.028762349126466108\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3929008567931457,\n \"mc1_stderr\": 0.017097248285233065,\n \"mc2\": 0.5582372806316004,\n \"mc2_stderr\": 0.015330920960330282\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8279400157853196,\n \"acc_stderr\": 0.010607731615246996\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6944655041698257,\n \"acc_stderr\": 
0.01268813407672688\n }\n}\n```", "repo_url": "https://huggingface.co/NousResearch/Nous-Hermes-2-SOLAR-10.7B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-44-46.879799.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-44-46.879799.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-44-46.879799.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-44-46.879799.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-44-46.879799.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_44_46.879799", "path": ["**/details_harness|winogrande|5_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T13-44-46.879799.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T13_44_46.879799", "path": ["results_2024-01-04T13-44-46.879799.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T13-44-46.879799.parquet"]}]}]}
2024-01-04T13:47:28+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of NousResearch/Nous-Hermes-2-SOLAR-10.7B Dataset automatically created during the evaluation run of model NousResearch/Nous-Hermes-2-SOLAR-10.7B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T13:44:46.879799 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
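The processed text above ends the sentence "you can for instance do the following:" without the code block that originally followed it; the sketch below reconstructs that example from the card's own loading instructions (the repo name, the `harness_winogrande_5` configuration, and the `train` split all appear elsewhere in this record), so it is a restoration of the dropped snippet rather than new API usage.

```python
from datasets import load_dataset

# Per-example details for one task of the run; each harness task has its own configuration,
# and the "train" split always points at the latest results for that task.
data = load_dataset(
    "open-llm-leaderboard/details_NousResearch__Nous-Hermes-2-SOLAR-10.7B",
    "harness_winogrande_5",
    split="train",
)
```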
[ "# Dataset Card for Evaluation run of NousResearch/Nous-Hermes-2-SOLAR-10.7B\n\n\n\nDataset automatically created during the evaluation run of model NousResearch/Nous-Hermes-2-SOLAR-10.7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:44:46.879799(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of NousResearch/Nous-Hermes-2-SOLAR-10.7B\n\n\n\nDataset automatically created during the evaluation run of model NousResearch/Nous-Hermes-2-SOLAR-10.7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:44:46.879799(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of NousResearch/Nous-Hermes-2-SOLAR-10.7B\n\n\n\nDataset automatically created during the evaluation run of model NousResearch/Nous-Hermes-2-SOLAR-10.7B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:44:46.879799(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
fd35b3bc4b52ac98b50d38aed2c44fb0fe19d431
# Dataset Card for Evaluation run of dfurman/Mistral-7B-Instruct-v0.2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [dfurman/Mistral-7B-Instruct-v0.2](https://huggingface.co/dfurman/Mistral-7B-Instruct-v0.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_dfurman__Mistral-7B-Instruct-v0.2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T13:48:31.156343](https://huggingface.co/datasets/open-llm-leaderboard/details_dfurman__Mistral-7B-Instruct-v0.2/blob/main/results_2024-01-04T13-48-31.156343.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.5992297441817553, "acc_stderr": 0.0331552136448952, "acc_norm": 0.6045877697767505, "acc_norm_stderr": 0.03383492909086883, "mc1": 0.408812729498164, "mc1_stderr": 0.01720995215164173, "mc2": 0.5605816288697437, "mc2_stderr": 0.015503229959649428 }, "harness|arc:challenge|25": { "acc": 0.5597269624573379, "acc_stderr": 0.014506769524804234, "acc_norm": 0.6015358361774744, "acc_norm_stderr": 0.014306946052735563 }, "harness|hellaswag|10": { "acc": 0.6353316072495518, "acc_stderr": 0.004803533333364223, "acc_norm": 0.8279227245568612, "acc_norm_stderr": 0.00376676198331935 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5703703703703704, "acc_stderr": 0.04276349494376599, "acc_norm": 0.5703703703703704, "acc_norm_stderr": 0.04276349494376599 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.03738520676119669, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.03738520676119669 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.55, "acc_stderr": 0.049999999999999996, "acc_norm": 0.55, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6452830188679245, "acc_stderr": 0.02944517532819959, "acc_norm": 0.6452830188679245, "acc_norm_stderr": 0.02944517532819959 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6458333333333334, "acc_stderr": 0.039994111357535424, "acc_norm": 0.6458333333333334, "acc_norm_stderr": 0.039994111357535424 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956911, "acc_norm": 0.51, "acc_norm_stderr": 
0.05024183937956911 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5838150289017341, "acc_stderr": 0.03758517775404947, "acc_norm": 0.5838150289017341, "acc_norm_stderr": 0.03758517775404947 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.04897104952726366, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.04897104952726366 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768078, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768078 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5361702127659574, "acc_stderr": 0.032600385118357715, "acc_norm": 0.5361702127659574, "acc_norm_stderr": 0.032600385118357715 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4298245614035088, "acc_stderr": 0.04657047260594963, "acc_norm": 0.4298245614035088, "acc_norm_stderr": 0.04657047260594963 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5310344827586206, "acc_stderr": 0.04158632762097828, "acc_norm": 0.5310344827586206, "acc_norm_stderr": 0.04158632762097828 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3888888888888889, "acc_stderr": 0.02510742548113729, "acc_norm": 0.3888888888888889, "acc_norm_stderr": 0.02510742548113729 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.04306241259127153, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.04306241259127153 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7225806451612903, "acc_stderr": 0.025470196835900055, "acc_norm": 0.7225806451612903, "acc_norm_stderr": 0.025470196835900055 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4630541871921182, "acc_stderr": 0.035083705204426656, "acc_norm": 0.4630541871921182, "acc_norm_stderr": 0.035083705204426656 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.67, "acc_stderr": 0.04725815626252609, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252609 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7333333333333333, "acc_stderr": 0.03453131801885417, "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.03453131801885417 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7272727272727273, "acc_stderr": 0.03173071239071724, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.03173071239071724 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8497409326424871, "acc_stderr": 0.02578772318072387, "acc_norm": 0.8497409326424871, "acc_norm_stderr": 0.02578772318072387 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6205128205128205, "acc_stderr": 0.024603626924097417, "acc_norm": 0.6205128205128205, "acc_norm_stderr": 0.024603626924097417 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948492, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948492 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.5840336134453782, "acc_stderr": 0.032016501007396114, "acc_norm": 0.5840336134453782, "acc_norm_stderr": 0.032016501007396114 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, 
"acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7889908256880734, "acc_stderr": 0.017493922404112648, "acc_norm": 0.7889908256880734, "acc_norm_stderr": 0.017493922404112648 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.46296296296296297, "acc_stderr": 0.03400603625538271, "acc_norm": 0.46296296296296297, "acc_norm_stderr": 0.03400603625538271 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7745098039215687, "acc_stderr": 0.029331162294251735, "acc_norm": 0.7745098039215687, "acc_norm_stderr": 0.029331162294251735 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.759493670886076, "acc_stderr": 0.027820781981149685, "acc_norm": 0.759493670886076, "acc_norm_stderr": 0.027820781981149685 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6547085201793722, "acc_stderr": 0.03191100192835794, "acc_norm": 0.6547085201793722, "acc_norm_stderr": 0.03191100192835794 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7022900763358778, "acc_stderr": 0.040103589424622034, "acc_norm": 0.7022900763358778, "acc_norm_stderr": 0.040103589424622034 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6759259259259259, "acc_stderr": 0.045245960070300476, "acc_norm": 0.6759259259259259, "acc_norm_stderr": 0.045245960070300476 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7177914110429447, "acc_stderr": 0.03536117886664742, "acc_norm": 0.7177914110429447, "acc_norm_stderr": 0.03536117886664742 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.45535714285714285, "acc_stderr": 0.04726835553719099, "acc_norm": 0.45535714285714285, "acc_norm_stderr": 0.04726835553719099 }, "harness|hendrycksTest-management|5": { "acc": 0.7281553398058253, "acc_stderr": 0.044052680241409216, "acc_norm": 0.7281553398058253, "acc_norm_stderr": 0.044052680241409216 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.022801382534597528, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.022801382534597528 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7867177522349936, "acc_stderr": 0.014648172749593513, "acc_norm": 0.7867177522349936, "acc_norm_stderr": 0.014648172749593513 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6907514450867052, "acc_stderr": 0.02488314057007176, "acc_norm": 0.6907514450867052, "acc_norm_stderr": 0.02488314057007176 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.22681564245810057, "acc_stderr": 0.014005843570897888, "acc_norm": 0.22681564245810057, "acc_norm_stderr": 0.014005843570897888 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6862745098039216, "acc_stderr": 0.026568921015457152, "acc_norm": 0.6862745098039216, "acc_norm_stderr": 0.026568921015457152 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7041800643086816, "acc_stderr": 0.025922371788818763, "acc_norm": 0.7041800643086816, "acc_norm_stderr": 0.025922371788818763 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6975308641975309, "acc_stderr": 0.02555765398186806, "acc_norm": 0.6975308641975309, "acc_norm_stderr": 0.02555765398186806 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4858156028368794, "acc_stderr": 0.02981549448368206, "acc_norm": 0.4858156028368794, "acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4511082138200782, "acc_stderr": 0.012709037347346233, "acc_norm": 0.4511082138200782, "acc_norm_stderr": 0.012709037347346233 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6544117647058824, "acc_stderr": 0.028888193103988637, "acc_norm": 0.6544117647058824, "acc_norm_stderr": 0.028888193103988637 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6470588235294118, "acc_stderr": 0.019333142020797167, "acc_norm": 0.6470588235294118, "acc_norm_stderr": 0.019333142020797167 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6, "acc_stderr": 0.0469237132203465, "acc_norm": 0.6, "acc_norm_stderr": 0.0469237132203465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6040816326530613, "acc_stderr": 0.03130802899065686, "acc_norm": 0.6040816326530613, "acc_norm_stderr": 0.03130802899065686 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8059701492537313, "acc_stderr": 0.027962677604768907, "acc_norm": 0.8059701492537313, "acc_norm_stderr": 0.027962677604768907 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.84, "acc_stderr": 0.03684529491774708, "acc_norm": 0.84, "acc_norm_stderr": 0.03684529491774708 }, "harness|hendrycksTest-virology|5": { "acc": 0.463855421686747, "acc_stderr": 0.03882310850890593, "acc_norm": 0.463855421686747, "acc_norm_stderr": 0.03882310850890593 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7953216374269005, "acc_stderr": 0.030944459778533207, "acc_norm": 0.7953216374269005, "acc_norm_stderr": 0.030944459778533207 }, "harness|truthfulqa:mc|0": { "mc1": 0.408812729498164, "mc1_stderr": 0.01720995215164173, "mc2": 0.5605816288697437, "mc2_stderr": 0.015503229959649428 }, "harness|winogrande|5": { "acc": 0.7687450670876085, "acc_stderr": 0.01185004012485051 }, "harness|gsm8k|5": { "acc": 0.3479909021986353, "acc_stderr": 0.013120581030382132 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_dfurman__Mistral-7B-Instruct-v0.2
[ "region:us" ]
2024-01-04T13:50:50+00:00
{"pretty_name": "Evaluation run of dfurman/Mistral-7B-Instruct-v0.2", "dataset_summary": "Dataset automatically created during the evaluation run of model [dfurman/Mistral-7B-Instruct-v0.2](https://huggingface.co/dfurman/Mistral-7B-Instruct-v0.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_dfurman__Mistral-7B-Instruct-v0.2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T13:48:31.156343](https://huggingface.co/datasets/open-llm-leaderboard/details_dfurman__Mistral-7B-Instruct-v0.2/blob/main/results_2024-01-04T13-48-31.156343.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5992297441817553,\n \"acc_stderr\": 0.0331552136448952,\n \"acc_norm\": 0.6045877697767505,\n \"acc_norm_stderr\": 0.03383492909086883,\n \"mc1\": 0.408812729498164,\n \"mc1_stderr\": 0.01720995215164173,\n \"mc2\": 0.5605816288697437,\n \"mc2_stderr\": 0.015503229959649428\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5597269624573379,\n \"acc_stderr\": 0.014506769524804234,\n \"acc_norm\": 0.6015358361774744,\n \"acc_norm_stderr\": 0.014306946052735563\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6353316072495518,\n \"acc_stderr\": 0.004803533333364223,\n \"acc_norm\": 0.8279227245568612,\n \"acc_norm_stderr\": 0.00376676198331935\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5703703703703704,\n \"acc_stderr\": 0.04276349494376599,\n \"acc_norm\": 0.5703703703703704,\n \"acc_norm_stderr\": 0.04276349494376599\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.03738520676119669,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.03738520676119669\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6452830188679245,\n \"acc_stderr\": 0.02944517532819959,\n \"acc_norm\": 0.6452830188679245,\n \"acc_norm_stderr\": 0.02944517532819959\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6458333333333334,\n \"acc_stderr\": 0.039994111357535424,\n \"acc_norm\": 0.6458333333333334,\n \"acc_norm_stderr\": 0.039994111357535424\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5838150289017341,\n \"acc_stderr\": 0.03758517775404947,\n \"acc_norm\": 0.5838150289017341,\n \"acc_norm_stderr\": 0.03758517775404947\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.04897104952726366,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.04897104952726366\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768078,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768078\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5361702127659574,\n \"acc_stderr\": 0.032600385118357715,\n \"acc_norm\": 0.5361702127659574,\n \"acc_norm_stderr\": 0.032600385118357715\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4298245614035088,\n \"acc_stderr\": 0.04657047260594963,\n \"acc_norm\": 0.4298245614035088,\n \"acc_norm_stderr\": 0.04657047260594963\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5310344827586206,\n \"acc_stderr\": 0.04158632762097828,\n \"acc_norm\": 0.5310344827586206,\n \"acc_norm_stderr\": 0.04158632762097828\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3888888888888889,\n \"acc_stderr\": 0.02510742548113729,\n \"acc_norm\": 0.3888888888888889,\n \"acc_norm_stderr\": 0.02510742548113729\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.36507936507936506,\n \"acc_stderr\": 0.04306241259127153,\n \"acc_norm\": 0.36507936507936506,\n \"acc_norm_stderr\": 0.04306241259127153\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7225806451612903,\n \"acc_stderr\": 0.025470196835900055,\n \"acc_norm\": 0.7225806451612903,\n \"acc_norm_stderr\": 0.025470196835900055\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4630541871921182,\n \"acc_stderr\": 0.035083705204426656,\n \"acc_norm\": 0.4630541871921182,\n \"acc_norm_stderr\": 0.035083705204426656\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252609,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252609\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7333333333333333,\n \"acc_stderr\": 0.03453131801885417,\n \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.03453131801885417\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.03173071239071724,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.03173071239071724\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8497409326424871,\n \"acc_stderr\": 0.02578772318072387,\n \"acc_norm\": 0.8497409326424871,\n \"acc_norm_stderr\": 0.02578772318072387\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6205128205128205,\n \"acc_stderr\": 0.024603626924097417,\n \"acc_norm\": 0.6205128205128205,\n \"acc_norm_stderr\": 0.024603626924097417\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948492,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948492\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.5840336134453782,\n \"acc_stderr\": 0.032016501007396114,\n \"acc_norm\": 0.5840336134453782,\n \"acc_norm_stderr\": 0.032016501007396114\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7889908256880734,\n \"acc_stderr\": 0.017493922404112648,\n \"acc_norm\": 0.7889908256880734,\n \"acc_norm_stderr\": 0.017493922404112648\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.46296296296296297,\n \"acc_stderr\": 0.03400603625538271,\n \"acc_norm\": 0.46296296296296297,\n \"acc_norm_stderr\": 0.03400603625538271\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7745098039215687,\n \"acc_stderr\": 0.029331162294251735,\n \"acc_norm\": 0.7745098039215687,\n \"acc_norm_stderr\": 0.029331162294251735\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.759493670886076,\n \"acc_stderr\": 0.027820781981149685,\n \"acc_norm\": 0.759493670886076,\n \"acc_norm_stderr\": 0.027820781981149685\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6547085201793722,\n \"acc_stderr\": 0.03191100192835794,\n \"acc_norm\": 0.6547085201793722,\n \"acc_norm_stderr\": 0.03191100192835794\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7022900763358778,\n \"acc_stderr\": 0.040103589424622034,\n \"acc_norm\": 0.7022900763358778,\n \"acc_norm_stderr\": 0.040103589424622034\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6759259259259259,\n \"acc_stderr\": 0.045245960070300476,\n \"acc_norm\": 0.6759259259259259,\n \"acc_norm_stderr\": 0.045245960070300476\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7177914110429447,\n \"acc_stderr\": 0.03536117886664742,\n \"acc_norm\": 0.7177914110429447,\n \"acc_norm_stderr\": 0.03536117886664742\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.45535714285714285,\n \"acc_stderr\": 0.04726835553719099,\n \"acc_norm\": 0.45535714285714285,\n \"acc_norm_stderr\": 0.04726835553719099\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7281553398058253,\n \"acc_stderr\": 0.044052680241409216,\n \"acc_norm\": 0.7281553398058253,\n \"acc_norm_stderr\": 0.044052680241409216\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.022801382534597528,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.022801382534597528\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n 
\"acc\": 0.7867177522349936,\n \"acc_stderr\": 0.014648172749593513,\n \"acc_norm\": 0.7867177522349936,\n \"acc_norm_stderr\": 0.014648172749593513\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6907514450867052,\n \"acc_stderr\": 0.02488314057007176,\n \"acc_norm\": 0.6907514450867052,\n \"acc_norm_stderr\": 0.02488314057007176\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.22681564245810057,\n \"acc_stderr\": 0.014005843570897888,\n \"acc_norm\": 0.22681564245810057,\n \"acc_norm_stderr\": 0.014005843570897888\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6862745098039216,\n \"acc_stderr\": 0.026568921015457152,\n \"acc_norm\": 0.6862745098039216,\n \"acc_norm_stderr\": 0.026568921015457152\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7041800643086816,\n \"acc_stderr\": 0.025922371788818763,\n \"acc_norm\": 0.7041800643086816,\n \"acc_norm_stderr\": 0.025922371788818763\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6975308641975309,\n \"acc_stderr\": 0.02555765398186806,\n \"acc_norm\": 0.6975308641975309,\n \"acc_norm_stderr\": 0.02555765398186806\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4511082138200782,\n \"acc_stderr\": 0.012709037347346233,\n \"acc_norm\": 0.4511082138200782,\n \"acc_norm_stderr\": 0.012709037347346233\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6544117647058824,\n \"acc_stderr\": 0.028888193103988637,\n \"acc_norm\": 0.6544117647058824,\n \"acc_norm_stderr\": 0.028888193103988637\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6470588235294118,\n \"acc_stderr\": 0.019333142020797167,\n \"acc_norm\": 0.6470588235294118,\n \"acc_norm_stderr\": 0.019333142020797167\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6040816326530613,\n \"acc_stderr\": 0.03130802899065686,\n \"acc_norm\": 0.6040816326530613,\n \"acc_norm_stderr\": 0.03130802899065686\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8059701492537313,\n \"acc_stderr\": 0.027962677604768907,\n \"acc_norm\": 0.8059701492537313,\n \"acc_norm_stderr\": 0.027962677604768907\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.84,\n \"acc_stderr\": 0.03684529491774708,\n \"acc_norm\": 0.84,\n \"acc_norm_stderr\": 0.03684529491774708\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.463855421686747,\n \"acc_stderr\": 0.03882310850890593,\n \"acc_norm\": 0.463855421686747,\n \"acc_norm_stderr\": 0.03882310850890593\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7953216374269005,\n \"acc_stderr\": 0.030944459778533207,\n \"acc_norm\": 0.7953216374269005,\n \"acc_norm_stderr\": 0.030944459778533207\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.408812729498164,\n \"mc1_stderr\": 0.01720995215164173,\n \"mc2\": 0.5605816288697437,\n \"mc2_stderr\": 0.015503229959649428\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7687450670876085,\n \"acc_stderr\": 0.01185004012485051\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3479909021986353,\n \"acc_stderr\": 0.013120581030382132\n }\n}\n```", "repo_url": 
"https://huggingface.co/dfurman/Mistral-7B-Instruct-v0.2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-48-31.156343.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-48-31.156343.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-48-31.156343.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-48-31.156343.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-48-31.156343.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_48_31.156343", "path": ["**/details_harness|winogrande|5_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T13-48-31.156343.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T13_48_31.156343", "path": ["results_2024-01-04T13-48-31.156343.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T13-48-31.156343.parquet"]}]}]}
2024-01-04T13:51:13+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of dfurman/Mistral-7B-Instruct-v0.2 Dataset automatically created during the evaluation run of model dfurman/Mistral-7B-Instruct-v0.2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T13:48:31.156343 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
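The flattened summary above drops the code snippet that normally follows "you can for instance do the following:". A minimal sketch of that load call, assuming the `datasets` library is installed and that this record's repository follows the leaderboard's usual `details_<org>__<model>` naming pattern (the exact repo id is not spelled out in this field, so treat it as an assumption; the config and split names are taken from the config listing above):

```python
from datasets import load_dataset

# Repo id assumed from the leaderboard's "details_<org>__<model>" naming convention;
# it is not spelled out in this flattened field.
data = load_dataset(
    "open-llm-leaderboard/details_dfurman__Mistral-7B-Instruct-v0.2",
    "harness_winogrande_5",  # one of the 63 task configs listed in the metadata above
    split="latest",          # per-run timestamp splits are also available
)
print(data)
```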
[ "# Dataset Card for Evaluation run of dfurman/Mistral-7B-Instruct-v0.2\n\n\n\nDataset automatically created during the evaluation run of model dfurman/Mistral-7B-Instruct-v0.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:48:31.156343(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of dfurman/Mistral-7B-Instruct-v0.2\n\n\n\nDataset automatically created during the evaluation run of model dfurman/Mistral-7B-Instruct-v0.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T13:48:31.156343(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of dfurman/Mistral-7B-Instruct-v0.2\n\n\n\nDataset automatically created during the evaluation run of model dfurman/Mistral-7B-Instruct-v0.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T13:48:31.156343(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
802fa5c1b7edc5591f38bed8db8e5584801d7d09
# Dataset Card for Evaluation run of r2rss/Malachite-7b-v0 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [r2rss/Malachite-7b-v0](https://huggingface.co/r2rss/Malachite-7b-v0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_r2rss__Malachite-7b-v0", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-05T00:32:20.869306](https://huggingface.co/datasets/open-llm-leaderboard/details_r2rss__Malachite-7b-v0/blob/main/results_2024-01-05T00-32-20.869306.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6348246683206857, "acc_stderr": 0.0322902065855879, "acc_norm": 0.6395048543996246, "acc_norm_stderr": 0.03293880266402491, "mc1": 0.4724602203182375, "mc1_stderr": 0.017476930190712187, "mc2": 0.644904243585318, "mc2_stderr": 0.014938833999403515 }, "harness|arc:challenge|25": { "acc": 0.6245733788395904, "acc_stderr": 0.014150631435111728, "acc_norm": 0.6774744027303754, "acc_norm_stderr": 0.01365998089427737 }, "harness|hellaswag|10": { "acc": 0.6227843059151563, "acc_stderr": 0.004836990373261567, "acc_norm": 0.8365863373829915, "acc_norm_stderr": 0.0036898701424130753 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6907894736842105, "acc_stderr": 0.037610708698674805, "acc_norm": 0.6907894736842105, "acc_norm_stderr": 0.037610708698674805 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.63, "acc_stderr": 0.048523658709391, "acc_norm": 0.63, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6792452830188679, "acc_stderr": 0.028727502957880267, "acc_norm": 0.6792452830188679, "acc_norm_stderr": 0.028727502957880267 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.03586879280080341, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.03586879280080341 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542126, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542126 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4411764705882353, "acc_stderr": 0.049406356306056595, "acc_norm": 0.4411764705882353, "acc_norm_stderr": 0.049406356306056595 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5787234042553191, "acc_stderr": 0.03227834510146267, "acc_norm": 0.5787234042553191, "acc_norm_stderr": 0.03227834510146267 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6275862068965518, "acc_stderr": 0.04028731532947558, "acc_norm": 0.6275862068965518, "acc_norm_stderr": 0.04028731532947558 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4417989417989418, "acc_stderr": 0.02557625706125383, "acc_norm": 0.4417989417989418, "acc_norm_stderr": 0.02557625706125383 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4444444444444444, "acc_stderr": 0.04444444444444449, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.04444444444444449 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7709677419354839, "acc_stderr": 0.02390491431178265, "acc_norm": 0.7709677419354839, "acc_norm_stderr": 0.02390491431178265 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.035179450386910616, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.035179450386910616 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7575757575757576, "acc_stderr": 0.03346409881055953, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.03346409881055953 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.02886977846026705, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.02886977846026705 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8756476683937824, "acc_stderr": 0.02381447708659357, "acc_norm": 0.8756476683937824, "acc_norm_stderr": 0.02381447708659357 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6512820512820513, "acc_stderr": 0.02416278028401772, "acc_norm": 0.6512820512820513, "acc_norm_stderr": 0.02416278028401772 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3111111111111111, "acc_stderr": 0.028226446749683512, "acc_norm": 0.3111111111111111, "acc_norm_stderr": 0.028226446749683512 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6470588235294118, "acc_stderr": 0.031041941304059278, "acc_norm": 0.6470588235294118, "acc_norm_stderr": 0.031041941304059278 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.33774834437086093, "acc_stderr": 
0.03861557546255169, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.03861557546255169 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8330275229357799, "acc_stderr": 0.01599015488507338, "acc_norm": 0.8330275229357799, "acc_norm_stderr": 0.01599015488507338 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4722222222222222, "acc_stderr": 0.0340470532865388, "acc_norm": 0.4722222222222222, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7843137254901961, "acc_stderr": 0.028867431449849316, "acc_norm": 0.7843137254901961, "acc_norm_stderr": 0.028867431449849316 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8059071729957806, "acc_stderr": 0.025744902532290913, "acc_norm": 0.8059071729957806, "acc_norm_stderr": 0.025744902532290913 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6995515695067265, "acc_stderr": 0.030769352008229146, "acc_norm": 0.6995515695067265, "acc_norm_stderr": 0.030769352008229146 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8015267175572519, "acc_stderr": 0.03498149385462472, "acc_norm": 0.8015267175572519, "acc_norm_stderr": 0.03498149385462472 }, "harness|hendrycksTest-international_law|5": { "acc": 0.768595041322314, "acc_stderr": 0.03849856098794088, "acc_norm": 0.768595041322314, "acc_norm_stderr": 0.03849856098794088 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7129629629629629, "acc_stderr": 0.043733130409147614, "acc_norm": 0.7129629629629629, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.7572815533980582, "acc_stderr": 0.04245022486384493, "acc_norm": 0.7572815533980582, "acc_norm_stderr": 0.04245022486384493 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406964, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406964 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8301404853128991, "acc_stderr": 0.013428186370608294, "acc_norm": 0.8301404853128991, "acc_norm_stderr": 0.013428186370608294 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7138728323699421, "acc_stderr": 0.02433214677913413, "acc_norm": 0.7138728323699421, "acc_norm_stderr": 0.02433214677913413 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.41787709497206704, "acc_stderr": 0.016495400635820084, "acc_norm": 0.41787709497206704, "acc_norm_stderr": 0.016495400635820084 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6993464052287581, "acc_stderr": 0.02625605383571896, "acc_norm": 0.6993464052287581, "acc_norm_stderr": 0.02625605383571896 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6720257234726688, "acc_stderr": 0.026664410886937624, "acc_norm": 0.6720257234726688, "acc_norm_stderr": 0.026664410886937624 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7345679012345679, "acc_stderr": 0.024569223600460845, "acc_norm": 0.7345679012345679, "acc_norm_stderr": 0.024569223600460845 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.45390070921985815, "acc_stderr": 0.029700453247291474, "acc_norm": 0.45390070921985815, "acc_norm_stderr": 0.029700453247291474 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.41851368970013036, "acc_stderr": 0.012599505608336472, "acc_norm": 0.41851368970013036, "acc_norm_stderr": 0.012599505608336472 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.625, "acc_stderr": 0.029408372932278746, "acc_norm": 0.625, "acc_norm_stderr": 0.029408372932278746 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6666666666666666, "acc_stderr": 0.0190709855896875, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.0190709855896875 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6636363636363637, "acc_stderr": 0.04525393596302506, "acc_norm": 0.6636363636363637, "acc_norm_stderr": 0.04525393596302506 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7142857142857143, "acc_stderr": 0.028920583220675606, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.028920583220675606 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8656716417910447, "acc_stderr": 0.024112678240900798, "acc_norm": 0.8656716417910447, "acc_norm_stderr": 0.024112678240900798 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699122, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699122 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8538011695906432, "acc_stderr": 0.02709729011807082, "acc_norm": 0.8538011695906432, "acc_norm_stderr": 0.02709729011807082 }, "harness|truthfulqa:mc|0": { "mc1": 0.4724602203182375, "mc1_stderr": 0.017476930190712187, "mc2": 0.644904243585318, "mc2_stderr": 0.014938833999403515 }, "harness|winogrande|5": { "acc": 0.8121546961325967, "acc_stderr": 0.010977481103435091 }, "harness|gsm8k|5": { "acc": 0.44806671721000757, "acc_stderr": 0.013697992668274525 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
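To complement the winogrande example earlier in this card, here is a short sketch for pulling the aggregated run-level metrics rather than per-example details. It assumes the `datasets` library and that the "results" configuration mentioned in the card exposes the same "latest" split alias as the per-task configs (the card's own example uses `split="train"`, which this template describes as pointing to the latest run):

```python
from datasets import load_dataset

# Aggregated run-level metrics rather than per-example task details.
results = load_dataset(
    "open-llm-leaderboard/details_r2rss__Malachite-7b-v0",
    "results",
    split="latest",  # assumed alias for the most recent run; per-run timestamp splits should also exist
)
print(results[0])  # one row per run, carrying the aggregates shown under "Latest results"
```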
open-llm-leaderboard/details_r2rss__Malachite-7b-v0
[ "region:us" ]
2024-01-04T13:53:11+00:00
{"pretty_name": "Evaluation run of r2rss/Malachite-7b-v0", "dataset_summary": "Dataset automatically created during the evaluation run of model [r2rss/Malachite-7b-v0](https://huggingface.co/r2rss/Malachite-7b-v0) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_r2rss__Malachite-7b-v0\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-05T00:32:20.869306](https://huggingface.co/datasets/open-llm-leaderboard/details_r2rss__Malachite-7b-v0/blob/main/results_2024-01-05T00-32-20.869306.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6348246683206857,\n \"acc_stderr\": 0.0322902065855879,\n \"acc_norm\": 0.6395048543996246,\n \"acc_norm_stderr\": 0.03293880266402491,\n \"mc1\": 0.4724602203182375,\n \"mc1_stderr\": 0.017476930190712187,\n \"mc2\": 0.644904243585318,\n \"mc2_stderr\": 0.014938833999403515\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6245733788395904,\n \"acc_stderr\": 0.014150631435111728,\n \"acc_norm\": 0.6774744027303754,\n \"acc_norm_stderr\": 0.01365998089427737\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6227843059151563,\n \"acc_stderr\": 0.004836990373261567,\n \"acc_norm\": 0.8365863373829915,\n \"acc_norm_stderr\": 0.0036898701424130753\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6907894736842105,\n \"acc_stderr\": 0.037610708698674805,\n \"acc_norm\": 0.6907894736842105,\n \"acc_norm_stderr\": 0.037610708698674805\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.63,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.63,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6792452830188679,\n \"acc_stderr\": 0.028727502957880267,\n \"acc_norm\": 0.6792452830188679,\n \"acc_norm_stderr\": 0.028727502957880267\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.03586879280080341\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n \"acc_norm\": 
0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542126,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542126\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4411764705882353,\n \"acc_stderr\": 0.049406356306056595,\n \"acc_norm\": 0.4411764705882353,\n \"acc_norm_stderr\": 0.049406356306056595\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5787234042553191,\n \"acc_stderr\": 0.03227834510146267,\n \"acc_norm\": 0.5787234042553191,\n \"acc_norm_stderr\": 0.03227834510146267\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6275862068965518,\n \"acc_stderr\": 0.04028731532947558,\n \"acc_norm\": 0.6275862068965518,\n \"acc_norm_stderr\": 0.04028731532947558\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4417989417989418,\n \"acc_stderr\": 0.02557625706125383,\n \"acc_norm\": 0.4417989417989418,\n \"acc_norm_stderr\": 0.02557625706125383\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.04444444444444449,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.04444444444444449\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7709677419354839,\n \"acc_stderr\": 0.02390491431178265,\n \"acc_norm\": 0.7709677419354839,\n \"acc_norm_stderr\": 0.02390491431178265\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.035179450386910616,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.035179450386910616\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.03346409881055953,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.03346409881055953\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.02886977846026705,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.02886977846026705\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8756476683937824,\n \"acc_stderr\": 0.02381447708659357,\n \"acc_norm\": 0.8756476683937824,\n \"acc_norm_stderr\": 0.02381447708659357\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6512820512820513,\n 
\"acc_stderr\": 0.02416278028401772,\n \"acc_norm\": 0.6512820512820513,\n \"acc_norm_stderr\": 0.02416278028401772\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3111111111111111,\n \"acc_stderr\": 0.028226446749683512,\n \"acc_norm\": 0.3111111111111111,\n \"acc_norm_stderr\": 0.028226446749683512\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6470588235294118,\n \"acc_stderr\": 0.031041941304059278,\n \"acc_norm\": 0.6470588235294118,\n \"acc_norm_stderr\": 0.031041941304059278\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.03861557546255169,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.03861557546255169\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8330275229357799,\n \"acc_stderr\": 0.01599015488507338,\n \"acc_norm\": 0.8330275229357799,\n \"acc_norm_stderr\": 0.01599015488507338\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4722222222222222,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.4722222222222222,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7843137254901961,\n \"acc_stderr\": 0.028867431449849316,\n \"acc_norm\": 0.7843137254901961,\n \"acc_norm_stderr\": 0.028867431449849316\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8059071729957806,\n \"acc_stderr\": 0.025744902532290913,\n \"acc_norm\": 0.8059071729957806,\n \"acc_norm_stderr\": 0.025744902532290913\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6995515695067265,\n \"acc_stderr\": 0.030769352008229146,\n \"acc_norm\": 0.6995515695067265,\n \"acc_norm_stderr\": 0.030769352008229146\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8015267175572519,\n \"acc_stderr\": 0.03498149385462472,\n \"acc_norm\": 0.8015267175572519,\n \"acc_norm_stderr\": 0.03498149385462472\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.03849856098794088,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794088\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7129629629629629,\n \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.7129629629629629,\n \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7572815533980582,\n \"acc_stderr\": 0.04245022486384493,\n \"acc_norm\": 0.7572815533980582,\n \"acc_norm_stderr\": 0.04245022486384493\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406964,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406964\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8301404853128991,\n \"acc_stderr\": 0.013428186370608294,\n \"acc_norm\": 0.8301404853128991,\n 
\"acc_norm_stderr\": 0.013428186370608294\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7138728323699421,\n \"acc_stderr\": 0.02433214677913413,\n \"acc_norm\": 0.7138728323699421,\n \"acc_norm_stderr\": 0.02433214677913413\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.41787709497206704,\n \"acc_stderr\": 0.016495400635820084,\n \"acc_norm\": 0.41787709497206704,\n \"acc_norm_stderr\": 0.016495400635820084\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6993464052287581,\n \"acc_stderr\": 0.02625605383571896,\n \"acc_norm\": 0.6993464052287581,\n \"acc_norm_stderr\": 0.02625605383571896\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6720257234726688,\n \"acc_stderr\": 0.026664410886937624,\n \"acc_norm\": 0.6720257234726688,\n \"acc_norm_stderr\": 0.026664410886937624\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7345679012345679,\n \"acc_stderr\": 0.024569223600460845,\n \"acc_norm\": 0.7345679012345679,\n \"acc_norm_stderr\": 0.024569223600460845\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.45390070921985815,\n \"acc_stderr\": 0.029700453247291474,\n \"acc_norm\": 0.45390070921985815,\n \"acc_norm_stderr\": 0.029700453247291474\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.41851368970013036,\n \"acc_stderr\": 0.012599505608336472,\n \"acc_norm\": 0.41851368970013036,\n \"acc_norm_stderr\": 0.012599505608336472\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.625,\n \"acc_stderr\": 0.029408372932278746,\n \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.029408372932278746\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.0190709855896875,\n \"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.0190709855896875\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6636363636363637,\n \"acc_stderr\": 0.04525393596302506,\n \"acc_norm\": 0.6636363636363637,\n \"acc_norm_stderr\": 0.04525393596302506\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.028920583220675606,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.028920583220675606\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8656716417910447,\n \"acc_stderr\": 0.024112678240900798,\n \"acc_norm\": 0.8656716417910447,\n \"acc_norm_stderr\": 0.024112678240900798\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699122,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699122\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8538011695906432,\n \"acc_stderr\": 0.02709729011807082,\n \"acc_norm\": 0.8538011695906432,\n \"acc_norm_stderr\": 0.02709729011807082\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.4724602203182375,\n \"mc1_stderr\": 0.017476930190712187,\n \"mc2\": 0.644904243585318,\n \"mc2_stderr\": 0.014938833999403515\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8121546961325967,\n \"acc_stderr\": 0.010977481103435091\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.44806671721000757,\n \"acc_stderr\": 0.013697992668274525\n }\n}\n```", "repo_url": "https://huggingface.co/r2rss/Malachite-7b-v0", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|arc:challenge|25_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|arc:challenge|25_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|gsm8k|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|gsm8k|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hellaswag|10_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hellaswag|10_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-50-50.103039.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T13-50-50.103039.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-05T00-32-20.869306.parquet", 
"**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-05T00-32-20.869306.parquet", 
"**/details_harness|hendrycksTest-moral_disputes|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-05T00-32-20.869306.parquet", 
"**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-05T00-32-20.869306.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": 
["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": 
["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": 
["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T13-50-50.103039.parquet"]}, 
{"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": 
"latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": 
["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": 
["**/details_harness|hendrycksTest-public_relations|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["**/details_harness|winogrande|5_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": ["**/details_harness|winogrande|5_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-05T00-32-20.869306.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T13_50_50.103039", "path": ["results_2024-01-04T13-50-50.103039.parquet"]}, {"split": "2024_01_05T00_32_20.869306", "path": 
["results_2024-01-05T00-32-20.869306.parquet"]}, {"split": "latest", "path": ["results_2024-01-05T00-32-20.869306.parquet"]}]}]}
2024-01-05T00:35:03+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of r2rss/Malachite-7b-v0 Dataset automatically created during the evaluation run of model r2rss/Malachite-7b-v0 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the sketch after this card text): ## Latest results These are the latest results from run 2024-01-05T00:32:20.869306 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
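A minimal sketch of that load call is given below. The repository name is an assumption inferred from the leaderboard's usual `details_<org>__<model>` naming (it is not spelled out in this record), while the `harness_winogrande_5` configuration and the `latest` split are taken from this record's configuration metadata.

```python
from datasets import load_dataset

# Repository name is assumed from the leaderboard's details_<org>__<model> convention.
data = load_dataset(
    "open-llm-leaderboard/details_r2rss__Malachite-7b-v0",
    "harness_winogrande_5",  # one of the per-task configurations listed in the metadata
    split="latest",          # points at the most recent of the recorded runs
)
```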
[ "# Dataset Card for Evaluation run of r2rss/Malachite-7b-v0\n\n\n\nDataset automatically created during the evaluation run of model r2rss/Malachite-7b-v0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-05T00:32:20.869306(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of r2rss/Malachite-7b-v0\n\n\n\nDataset automatically created during the evaluation run of model r2rss/Malachite-7b-v0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-05T00:32:20.869306(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of r2rss/Malachite-7b-v0\n\n\n\nDataset automatically created during the evaluation run of model r2rss/Malachite-7b-v0 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 2 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-05T00:32:20.869306(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
d161d253ddb686083794058462319eda4fae93b6
# Dataset Card for Evaluation run of AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE](https://huggingface.co/AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)); a usage sketch for loading this configuration is given at the end of this card. To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_AIChenKai__TinyLlama-1.1B-Chat-v1.0-x2-MoE", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T14:00:19.727710](https://huggingface.co/datasets/open-llm-leaderboard/details_AIChenKai__TinyLlama-1.1B-Chat-v1.0-x2-MoE/blob/main/results_2024-01-04T14-00-19.727710.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2553456994926229, "acc_stderr": 0.030657806176450017, "acc_norm": 0.2561246954296689, "acc_norm_stderr": 0.03139419549460452, "mc1": 0.23011015911872704, "mc1_stderr": 0.014734557959807765, "mc2": 0.3737269544676089, "mc2_stderr": 0.013888337000449589 }, "harness|arc:challenge|25": { "acc": 0.34982935153583616, "acc_stderr": 0.01393680921215828, "acc_norm": 0.36006825938566556, "acc_norm_stderr": 0.014027516814585188 }, "harness|hellaswag|10": { "acc": 0.45887273451503685, "acc_stderr": 0.004972872811662285, "acc_norm": 0.6104361680940051, "acc_norm_stderr": 0.004866547422355555 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.14814814814814814, "acc_stderr": 0.030688647610352674, "acc_norm": 0.14814814814814814, "acc_norm_stderr": 0.030688647610352674 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.19078947368421054, "acc_stderr": 0.031975658210324984, "acc_norm": 0.19078947368421054, "acc_norm_stderr": 0.031975658210324984 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2792452830188679, "acc_stderr": 0.027611163402399715, "acc_norm": 0.2792452830188679, "acc_norm_stderr": 0.027611163402399715 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2152777777777778, "acc_stderr": 0.03437079344106135, "acc_norm": 0.2152777777777778, "acc_norm_stderr": 0.03437079344106135 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.04292346959909283, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909283 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr":
0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.17341040462427745, "acc_stderr": 0.02886810787497064, "acc_norm": 0.17341040462427745, "acc_norm_stderr": 0.02886810787497064 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.18627450980392157, "acc_stderr": 0.03873958714149351, "acc_norm": 0.18627450980392157, "acc_norm_stderr": 0.03873958714149351 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.25957446808510637, "acc_stderr": 0.028659179374292323, "acc_norm": 0.25957446808510637, "acc_norm_stderr": 0.028659179374292323 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.24561403508771928, "acc_stderr": 0.04049339297748142, "acc_norm": 0.24561403508771928, "acc_norm_stderr": 0.04049339297748142 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.23448275862068965, "acc_stderr": 0.035306258743465914, "acc_norm": 0.23448275862068965, "acc_norm_stderr": 0.035306258743465914 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.25396825396825395, "acc_stderr": 0.022418042891113953, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.022418042891113953 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.1984126984126984, "acc_stderr": 0.03567016675276862, "acc_norm": 0.1984126984126984, "acc_norm_stderr": 0.03567016675276862 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.22580645161290322, "acc_stderr": 0.02378557788418101, "acc_norm": 0.22580645161290322, "acc_norm_stderr": 0.02378557788418101 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.21674876847290642, "acc_stderr": 0.028990331252516235, "acc_norm": 0.21674876847290642, "acc_norm_stderr": 0.028990331252516235 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.2, "acc_stderr": 0.04020151261036846, "acc_norm": 0.2, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.24848484848484848, "acc_stderr": 0.03374402644139404, "acc_norm": 0.24848484848484848, "acc_norm_stderr": 0.03374402644139404 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.23232323232323232, "acc_stderr": 0.030088629490217483, "acc_norm": 0.23232323232323232, "acc_norm_stderr": 0.030088629490217483 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.22797927461139897, "acc_stderr": 0.030276909945178267, "acc_norm": 0.22797927461139897, "acc_norm_stderr": 0.030276909945178267 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.24358974358974358, "acc_stderr": 0.021763733684173926, "acc_norm": 0.24358974358974358, "acc_norm_stderr": 0.021763733684173926 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.26296296296296295, "acc_stderr": 0.02684205787383371, "acc_norm": 0.26296296296296295, "acc_norm_stderr": 0.02684205787383371 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.226890756302521, "acc_stderr": 0.027205371538279472, "acc_norm": 0.226890756302521, "acc_norm_stderr": 0.027205371538279472 
}, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.1986754966887417, "acc_stderr": 0.032578473844367774, "acc_norm": 0.1986754966887417, "acc_norm_stderr": 0.032578473844367774 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.24036697247706423, "acc_stderr": 0.01832060732096407, "acc_norm": 0.24036697247706423, "acc_norm_stderr": 0.01832060732096407 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.41203703703703703, "acc_stderr": 0.03356787758160835, "acc_norm": 0.41203703703703703, "acc_norm_stderr": 0.03356787758160835 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.2549019607843137, "acc_stderr": 0.030587591351604257, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604257 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.25316455696202533, "acc_stderr": 0.028304657943035303, "acc_norm": 0.25316455696202533, "acc_norm_stderr": 0.028304657943035303 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.34977578475336324, "acc_stderr": 0.03200736719484503, "acc_norm": 0.34977578475336324, "acc_norm_stderr": 0.03200736719484503 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.25190839694656486, "acc_stderr": 0.03807387116306086, "acc_norm": 0.25190839694656486, "acc_norm_stderr": 0.03807387116306086 }, "harness|hendrycksTest-international_law|5": { "acc": 0.256198347107438, "acc_stderr": 0.03984979653302871, "acc_norm": 0.256198347107438, "acc_norm_stderr": 0.03984979653302871 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.24074074074074073, "acc_stderr": 0.04133119440243839, "acc_norm": 0.24074074074074073, "acc_norm_stderr": 0.04133119440243839 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.22699386503067484, "acc_stderr": 0.032910995786157686, "acc_norm": 0.22699386503067484, "acc_norm_stderr": 0.032910995786157686 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.29464285714285715, "acc_stderr": 0.04327040932578728, "acc_norm": 0.29464285714285715, "acc_norm_stderr": 0.04327040932578728 }, "harness|hendrycksTest-management|5": { "acc": 0.24271844660194175, "acc_stderr": 0.04245022486384493, "acc_norm": 0.24271844660194175, "acc_norm_stderr": 0.04245022486384493 }, "harness|hendrycksTest-marketing|5": { "acc": 0.27350427350427353, "acc_stderr": 0.029202540153431166, "acc_norm": 0.27350427350427353, "acc_norm_stderr": 0.029202540153431166 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2796934865900383, "acc_stderr": 0.016050792148036546, "acc_norm": 0.2796934865900383, "acc_norm_stderr": 0.016050792148036546 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.23410404624277456, "acc_stderr": 0.022797110278071134, "acc_norm": 0.23410404624277456, "acc_norm_stderr": 0.022797110278071134 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.25139664804469275, "acc_stderr": 0.014508979453553967, "acc_norm": 0.25139664804469275, "acc_norm_stderr": 0.014508979453553967 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.238562091503268, "acc_stderr": 0.024404394928087866, "acc_norm": 0.238562091503268, "acc_norm_stderr": 0.024404394928087866 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2604501607717042, "acc_stderr": 0.02492672322484554, "acc_norm": 0.2604501607717042, "acc_norm_stderr": 0.02492672322484554 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.2654320987654321, "acc_stderr": 
0.024569223600460845, "acc_norm": 0.2654320987654321, "acc_norm_stderr": 0.024569223600460845 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.22695035460992907, "acc_stderr": 0.02498710636564298, "acc_norm": 0.22695035460992907, "acc_norm_stderr": 0.02498710636564298 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.23728813559322035, "acc_stderr": 0.010865436690780278, "acc_norm": 0.23728813559322035, "acc_norm_stderr": 0.010865436690780278 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.22794117647058823, "acc_stderr": 0.025483081468029804, "acc_norm": 0.22794117647058823, "acc_norm_stderr": 0.025483081468029804 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.272875816993464, "acc_stderr": 0.01802047414839358, "acc_norm": 0.272875816993464, "acc_norm_stderr": 0.01802047414839358 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2818181818181818, "acc_stderr": 0.043091187099464585, "acc_norm": 0.2818181818181818, "acc_norm_stderr": 0.043091187099464585 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.13877551020408163, "acc_stderr": 0.022131950419972655, "acc_norm": 0.13877551020408163, "acc_norm_stderr": 0.022131950419972655 }, "harness|hendrycksTest-sociology|5": { "acc": 0.23880597014925373, "acc_stderr": 0.030147775935409224, "acc_norm": 0.23880597014925373, "acc_norm_stderr": 0.030147775935409224 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.29, "acc_stderr": 0.04560480215720684, "acc_norm": 0.29, "acc_norm_stderr": 0.04560480215720684 }, "harness|hendrycksTest-virology|5": { "acc": 0.30120481927710846, "acc_stderr": 0.03571609230053481, "acc_norm": 0.30120481927710846, "acc_norm_stderr": 0.03571609230053481 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.30994152046783624, "acc_stderr": 0.03546976959393162, "acc_norm": 0.30994152046783624, "acc_norm_stderr": 0.03546976959393162 }, "harness|truthfulqa:mc|0": { "mc1": 0.23011015911872704, "mc1_stderr": 0.014734557959807765, "mc2": 0.3737269544676089, "mc2_stderr": 0.013888337000449589 }, "harness|winogrande|5": { "acc": 0.6037884767166535, "acc_stderr": 0.013746404157154942 }, "harness|gsm8k|5": { "acc": 0.022744503411675512, "acc_stderr": 0.004106620637749704 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. 
--> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_AIChenKai__TinyLlama-1.1B-Chat-v1.0-x2-MoE
[ "region:us" ]
2024-01-04T14:02:10+00:00
{"pretty_name": "Evaluation run of AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE", "dataset_summary": "Dataset automatically created during the evaluation run of model [AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE](https://huggingface.co/AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_AIChenKai__TinyLlama-1.1B-Chat-v1.0-x2-MoE\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T14:00:19.727710](https://huggingface.co/datasets/open-llm-leaderboard/details_AIChenKai__TinyLlama-1.1B-Chat-v1.0-x2-MoE/blob/main/results_2024-01-04T14-00-19.727710.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2553456994926229,\n \"acc_stderr\": 0.030657806176450017,\n \"acc_norm\": 0.2561246954296689,\n \"acc_norm_stderr\": 0.03139419549460452,\n \"mc1\": 0.23011015911872704,\n \"mc1_stderr\": 0.014734557959807765,\n \"mc2\": 0.3737269544676089,\n \"mc2_stderr\": 0.013888337000449589\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.34982935153583616,\n \"acc_stderr\": 0.01393680921215828,\n \"acc_norm\": 0.36006825938566556,\n \"acc_norm_stderr\": 0.014027516814585188\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.45887273451503685,\n \"acc_stderr\": 0.004972872811662285,\n \"acc_norm\": 0.6104361680940051,\n \"acc_norm_stderr\": 0.004866547422355555\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909284,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909284\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.14814814814814814,\n \"acc_stderr\": 0.030688647610352674,\n \"acc_norm\": 0.14814814814814814,\n \"acc_norm_stderr\": 0.030688647610352674\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.19078947368421054,\n \"acc_stderr\": 0.031975658210324984,\n \"acc_norm\": 0.19078947368421054,\n \"acc_norm_stderr\": 0.031975658210324984\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.2792452830188679,\n \"acc_stderr\": 0.027611163402399715,\n \"acc_norm\": 0.2792452830188679,\n \"acc_norm_stderr\": 0.027611163402399715\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2152777777777778,\n \"acc_stderr\": 0.03437079344106135,\n \"acc_norm\": 0.2152777777777778,\n \"acc_norm_stderr\": 0.03437079344106135\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909283,\n \"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909283\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.17341040462427745,\n \"acc_stderr\": 0.02886810787497064,\n \"acc_norm\": 0.17341040462427745,\n \"acc_norm_stderr\": 0.02886810787497064\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.18627450980392157,\n \"acc_stderr\": 0.03873958714149351,\n \"acc_norm\": 0.18627450980392157,\n \"acc_norm_stderr\": 0.03873958714149351\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.25957446808510637,\n \"acc_stderr\": 0.028659179374292323,\n \"acc_norm\": 0.25957446808510637,\n \"acc_norm_stderr\": 0.028659179374292323\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.24561403508771928,\n \"acc_stderr\": 0.04049339297748142,\n \"acc_norm\": 0.24561403508771928,\n \"acc_norm_stderr\": 0.04049339297748142\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.23448275862068965,\n \"acc_stderr\": 0.035306258743465914,\n \"acc_norm\": 0.23448275862068965,\n \"acc_norm_stderr\": 0.035306258743465914\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.25396825396825395,\n \"acc_stderr\": 0.022418042891113953,\n \"acc_norm\": 0.25396825396825395,\n \"acc_norm_stderr\": 0.022418042891113953\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.1984126984126984,\n \"acc_stderr\": 0.03567016675276862,\n \"acc_norm\": 0.1984126984126984,\n \"acc_norm_stderr\": 0.03567016675276862\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.22580645161290322,\n \"acc_stderr\": 0.02378557788418101,\n \"acc_norm\": 0.22580645161290322,\n \"acc_norm_stderr\": 0.02378557788418101\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.21674876847290642,\n \"acc_stderr\": 0.028990331252516235,\n \"acc_norm\": 0.21674876847290642,\n \"acc_norm_stderr\": 0.028990331252516235\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.24848484848484848,\n \"acc_stderr\": 0.03374402644139404,\n \"acc_norm\": 0.24848484848484848,\n \"acc_norm_stderr\": 0.03374402644139404\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.23232323232323232,\n \"acc_stderr\": 0.030088629490217483,\n \"acc_norm\": 0.23232323232323232,\n \"acc_norm_stderr\": 0.030088629490217483\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.22797927461139897,\n \"acc_stderr\": 0.030276909945178267,\n \"acc_norm\": 
0.22797927461139897,\n \"acc_norm_stderr\": 0.030276909945178267\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.24358974358974358,\n \"acc_stderr\": 0.021763733684173926,\n \"acc_norm\": 0.24358974358974358,\n \"acc_norm_stderr\": 0.021763733684173926\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.26296296296296295,\n \"acc_stderr\": 0.02684205787383371,\n \"acc_norm\": 0.26296296296296295,\n \"acc_norm_stderr\": 0.02684205787383371\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.226890756302521,\n \"acc_stderr\": 0.027205371538279472,\n \"acc_norm\": 0.226890756302521,\n \"acc_norm_stderr\": 0.027205371538279472\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.1986754966887417,\n \"acc_stderr\": 0.032578473844367774,\n \"acc_norm\": 0.1986754966887417,\n \"acc_norm_stderr\": 0.032578473844367774\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.24036697247706423,\n \"acc_stderr\": 0.01832060732096407,\n \"acc_norm\": 0.24036697247706423,\n \"acc_norm_stderr\": 0.01832060732096407\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.41203703703703703,\n \"acc_stderr\": 0.03356787758160835,\n \"acc_norm\": 0.41203703703703703,\n \"acc_norm_stderr\": 0.03356787758160835\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.030587591351604257,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.030587591351604257\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.25316455696202533,\n \"acc_stderr\": 0.028304657943035303,\n \"acc_norm\": 0.25316455696202533,\n \"acc_norm_stderr\": 0.028304657943035303\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.34977578475336324,\n \"acc_stderr\": 0.03200736719484503,\n \"acc_norm\": 0.34977578475336324,\n \"acc_norm_stderr\": 0.03200736719484503\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.25190839694656486,\n \"acc_stderr\": 0.03807387116306086,\n \"acc_norm\": 0.25190839694656486,\n \"acc_norm_stderr\": 0.03807387116306086\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.256198347107438,\n \"acc_stderr\": 0.03984979653302871,\n \"acc_norm\": 0.256198347107438,\n \"acc_norm_stderr\": 0.03984979653302871\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.24074074074074073,\n \"acc_stderr\": 0.04133119440243839,\n \"acc_norm\": 0.24074074074074073,\n \"acc_norm_stderr\": 0.04133119440243839\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.22699386503067484,\n \"acc_stderr\": 0.032910995786157686,\n \"acc_norm\": 0.22699386503067484,\n \"acc_norm_stderr\": 0.032910995786157686\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.29464285714285715,\n \"acc_stderr\": 0.04327040932578728,\n \"acc_norm\": 0.29464285714285715,\n \"acc_norm_stderr\": 0.04327040932578728\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.24271844660194175,\n \"acc_stderr\": 0.04245022486384493,\n \"acc_norm\": 0.24271844660194175,\n \"acc_norm_stderr\": 0.04245022486384493\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.27350427350427353,\n \"acc_stderr\": 0.029202540153431166,\n \"acc_norm\": 0.27350427350427353,\n \"acc_norm_stderr\": 0.029202540153431166\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n 
\"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2796934865900383,\n \"acc_stderr\": 0.016050792148036546,\n \"acc_norm\": 0.2796934865900383,\n \"acc_norm_stderr\": 0.016050792148036546\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.23410404624277456,\n \"acc_stderr\": 0.022797110278071134,\n \"acc_norm\": 0.23410404624277456,\n \"acc_norm_stderr\": 0.022797110278071134\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.25139664804469275,\n \"acc_stderr\": 0.014508979453553967,\n \"acc_norm\": 0.25139664804469275,\n \"acc_norm_stderr\": 0.014508979453553967\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.238562091503268,\n \"acc_stderr\": 0.024404394928087866,\n \"acc_norm\": 0.238562091503268,\n \"acc_norm_stderr\": 0.024404394928087866\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2604501607717042,\n \"acc_stderr\": 0.02492672322484554,\n \"acc_norm\": 0.2604501607717042,\n \"acc_norm_stderr\": 0.02492672322484554\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.2654320987654321,\n \"acc_stderr\": 0.024569223600460845,\n \"acc_norm\": 0.2654320987654321,\n \"acc_norm_stderr\": 0.024569223600460845\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.22695035460992907,\n \"acc_stderr\": 0.02498710636564298,\n \"acc_norm\": 0.22695035460992907,\n \"acc_norm_stderr\": 0.02498710636564298\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.23728813559322035,\n \"acc_stderr\": 0.010865436690780278,\n \"acc_norm\": 0.23728813559322035,\n \"acc_norm_stderr\": 0.010865436690780278\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.22794117647058823,\n \"acc_stderr\": 0.025483081468029804,\n \"acc_norm\": 0.22794117647058823,\n \"acc_norm_stderr\": 0.025483081468029804\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.272875816993464,\n \"acc_stderr\": 0.01802047414839358,\n \"acc_norm\": 0.272875816993464,\n \"acc_norm_stderr\": 0.01802047414839358\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2818181818181818,\n \"acc_stderr\": 0.043091187099464585,\n \"acc_norm\": 0.2818181818181818,\n \"acc_norm_stderr\": 0.043091187099464585\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.13877551020408163,\n \"acc_stderr\": 0.022131950419972655,\n \"acc_norm\": 0.13877551020408163,\n \"acc_norm_stderr\": 0.022131950419972655\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.23880597014925373,\n \"acc_stderr\": 0.030147775935409224,\n \"acc_norm\": 0.23880597014925373,\n \"acc_norm_stderr\": 0.030147775935409224\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.04560480215720684,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.04560480215720684\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.30120481927710846,\n \"acc_stderr\": 0.03571609230053481,\n \"acc_norm\": 0.30120481927710846,\n \"acc_norm_stderr\": 0.03571609230053481\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.30994152046783624,\n \"acc_stderr\": 0.03546976959393162,\n \"acc_norm\": 0.30994152046783624,\n \"acc_norm_stderr\": 0.03546976959393162\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23011015911872704,\n \"mc1_stderr\": 0.014734557959807765,\n \"mc2\": 0.3737269544676089,\n \"mc2_stderr\": 0.013888337000449589\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6037884767166535,\n \"acc_stderr\": 
0.013746404157154942\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.022744503411675512,\n \"acc_stderr\": 0.004106620637749704\n }\n}\n```", "repo_url": "https://huggingface.co/AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-00-19.727710.parquet", 
"**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-00-19.727710.parquet", 
"**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-00-19.727710.parquet", 
"**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-00-19.727710.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", 
"data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": 
"2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-00-19.727710.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["**/details_harness|winogrande|5_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": 
["**/details_harness|winogrande|5_2024-01-04T14-00-19.727710.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T14_00_19.727710", "path": ["results_2024-01-04T14-00-19.727710.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T14-00-19.727710.parquet"]}]}]}
2024-01-04T14:02:33+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE Dataset automatically created during the evaluation run of model AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T14:00:19.727710 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE\n\n\n\nDataset automatically created during the evaluation run of model AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:00:19.727710(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE\n\n\n\nDataset automatically created during the evaluation run of model AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:00:19.727710(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 207, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE\n\n\n\nDataset automatically created during the evaluation run of model AIChenKai/TinyLlama-1.1B-Chat-v1.0-x2-MoE on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T14:00:19.727710(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]" ]
fc62d04f77a65ab2e166dc0e4224d7fe51e71172
# Dataset Card for Evaluation run of NeverSleep/Noromaid-13b-v0.2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [NeverSleep/Noromaid-13b-v0.2](https://huggingface.co/NeverSleep/Noromaid-13b-v0.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_NeverSleep__Noromaid-13b-v0.2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T14:00:45.095922](https://huggingface.co/datasets/open-llm-leaderboard/details_NeverSleep__Noromaid-13b-v0.2/blob/main/results_2024-01-04T14-00-45.095922.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval): ```python { "all": { "acc": 0.5749604165833158, "acc_stderr": 0.03350487881102225, "acc_norm": 0.5817287256802396, "acc_norm_stderr": 0.03421882022173922, "mc1": 0.3623011015911873, "mc1_stderr": 0.016826646897262255, "mc2": 0.5257809202982358, "mc2_stderr": 0.015862961076190677 }, "harness|arc:challenge|25": { "acc": 0.591296928327645, "acc_stderr": 0.014365750345427, "acc_norm": 0.6092150170648464, "acc_norm_stderr": 0.01425856388051378 }, "harness|hellaswag|10": { "acc": 0.6501692889862577, "acc_stderr": 0.004759416464201141, "acc_norm": 0.8403704441346346, "acc_norm_stderr": 0.0036551361115537157 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5259259259259259, "acc_stderr": 0.04313531696750574, "acc_norm": 0.5259259259259259, "acc_norm_stderr": 0.04313531696750574 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5723684210526315, "acc_stderr": 0.04026097083296563, "acc_norm": 0.5723684210526315, "acc_norm_stderr": 0.04026097083296563 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.58, "acc_stderr": 0.04960449637488583, "acc_norm": 0.58, "acc_norm_stderr": 0.04960449637488583 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5924528301886792, "acc_stderr": 0.030242233800854494, "acc_norm": 0.5924528301886792, "acc_norm_stderr": 0.030242233800854494 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6458333333333334, "acc_stderr": 0.039994111357535424, "acc_norm": 0.6458333333333334, "acc_norm_stderr": 0.039994111357535424 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 
}, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5491329479768786, "acc_stderr": 0.0379401267469703, "acc_norm": 0.5491329479768786, "acc_norm_stderr": 0.0379401267469703 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.23529411764705882, "acc_stderr": 0.04220773659171451, "acc_norm": 0.23529411764705882, "acc_norm_stderr": 0.04220773659171451 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.46382978723404256, "acc_stderr": 0.032600385118357715, "acc_norm": 0.46382978723404256, "acc_norm_stderr": 0.032600385118357715 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.044346007015849245, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.044346007015849245 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5103448275862069, "acc_stderr": 0.04165774775728763, "acc_norm": 0.5103448275862069, "acc_norm_stderr": 0.04165774775728763 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.335978835978836, "acc_stderr": 0.024326310529149138, "acc_norm": 0.335978835978836, "acc_norm_stderr": 0.024326310529149138 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.36507936507936506, "acc_stderr": 0.04306241259127153, "acc_norm": 0.36507936507936506, "acc_norm_stderr": 0.04306241259127153 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6838709677419355, "acc_stderr": 0.026450874489042774, "acc_norm": 0.6838709677419355, "acc_norm_stderr": 0.026450874489042774 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.42857142857142855, "acc_stderr": 0.03481904844438804, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.03481904844438804 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.6545454545454545, "acc_stderr": 0.03713158067481913, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.03713158067481913 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7222222222222222, "acc_stderr": 0.031911782267135466, "acc_norm": 0.7222222222222222, "acc_norm_stderr": 0.031911782267135466 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8290155440414507, "acc_stderr": 0.02717121368316455, "acc_norm": 0.8290155440414507, "acc_norm_stderr": 0.02717121368316455 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5461538461538461, "acc_stderr": 0.02524277098712618, "acc_norm": 0.5461538461538461, "acc_norm_stderr": 0.02524277098712618 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.029116617606083018, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.029116617606083018 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6008403361344538, "acc_stderr": 0.03181110032413925, "acc_norm": 0.6008403361344538, "acc_norm_stderr": 0.03181110032413925 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3509933774834437, "acc_stderr": 
0.03896981964257375, "acc_norm": 0.3509933774834437, "acc_norm_stderr": 0.03896981964257375 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7724770642201835, "acc_stderr": 0.017974463578776502, "acc_norm": 0.7724770642201835, "acc_norm_stderr": 0.017974463578776502 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.41203703703703703, "acc_stderr": 0.03356787758160835, "acc_norm": 0.41203703703703703, "acc_norm_stderr": 0.03356787758160835 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7745098039215687, "acc_stderr": 0.02933116229425174, "acc_norm": 0.7745098039215687, "acc_norm_stderr": 0.02933116229425174 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7805907172995781, "acc_stderr": 0.026939106581553945, "acc_norm": 0.7805907172995781, "acc_norm_stderr": 0.026939106581553945 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575499, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.6564885496183206, "acc_stderr": 0.041649760719448786, "acc_norm": 0.6564885496183206, "acc_norm_stderr": 0.041649760719448786 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7603305785123967, "acc_stderr": 0.03896878985070415, "acc_norm": 0.7603305785123967, "acc_norm_stderr": 0.03896878985070415 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7592592592592593, "acc_stderr": 0.04133119440243838, "acc_norm": 0.7592592592592593, "acc_norm_stderr": 0.04133119440243838 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6932515337423313, "acc_stderr": 0.03623089915724146, "acc_norm": 0.6932515337423313, "acc_norm_stderr": 0.03623089915724146 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.375, "acc_stderr": 0.04595091388086298, "acc_norm": 0.375, "acc_norm_stderr": 0.04595091388086298 }, "harness|hendrycksTest-management|5": { "acc": 0.6796116504854369, "acc_stderr": 0.04620284082280041, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280041 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8034188034188035, "acc_stderr": 0.02603538609895129, "acc_norm": 0.8034188034188035, "acc_norm_stderr": 0.02603538609895129 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.59, "acc_stderr": 0.049431107042371025, "acc_norm": 0.59, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7650063856960408, "acc_stderr": 0.015162024152278445, "acc_norm": 0.7650063856960408, "acc_norm_stderr": 0.015162024152278445 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6416184971098265, "acc_stderr": 0.025816756791584187, "acc_norm": 0.6416184971098265, "acc_norm_stderr": 0.025816756791584187 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4972067039106145, "acc_stderr": 0.016722240595491725, "acc_norm": 0.4972067039106145, "acc_norm_stderr": 0.016722240595491725 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6372549019607843, "acc_stderr": 0.0275300784471103, "acc_norm": 0.6372549019607843, "acc_norm_stderr": 0.0275300784471103 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6527331189710611, "acc_stderr": 0.027040745502307336, "acc_norm": 0.6527331189710611, "acc_norm_stderr": 0.027040745502307336 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.6481481481481481, "acc_stderr": 0.026571483480719964, "acc_norm": 0.6481481481481481, "acc_norm_stderr": 0.026571483480719964 }, "harness|hendrycksTest-professional_accounting|5": { 
"acc": 0.4432624113475177, "acc_stderr": 0.029634838473766, "acc_norm": 0.4432624113475177, "acc_norm_stderr": 0.029634838473766 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4452411994784876, "acc_stderr": 0.012693421303973294, "acc_norm": 0.4452411994784876, "acc_norm_stderr": 0.012693421303973294 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.5367647058823529, "acc_stderr": 0.030290619180485687, "acc_norm": 0.5367647058823529, "acc_norm_stderr": 0.030290619180485687 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.565359477124183, "acc_stderr": 0.02005426920072646, "acc_norm": 0.565359477124183, "acc_norm_stderr": 0.02005426920072646 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6454545454545455, "acc_stderr": 0.045820048415054174, "acc_norm": 0.6454545454545455, "acc_norm_stderr": 0.045820048415054174 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6571428571428571, "acc_stderr": 0.030387262919547724, "acc_norm": 0.6571428571428571, "acc_norm_stderr": 0.030387262919547724 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7761194029850746, "acc_stderr": 0.029475250236017193, "acc_norm": 0.7761194029850746, "acc_norm_stderr": 0.029475250236017193 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.83, "acc_stderr": 0.03775251680686371, "acc_norm": 0.83, "acc_norm_stderr": 0.03775251680686371 }, "harness|hendrycksTest-virology|5": { "acc": 0.46987951807228917, "acc_stderr": 0.03885425420866766, "acc_norm": 0.46987951807228917, "acc_norm_stderr": 0.03885425420866766 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7894736842105263, "acc_stderr": 0.03126781714663179, "acc_norm": 0.7894736842105263, "acc_norm_stderr": 0.03126781714663179 }, "harness|truthfulqa:mc|0": { "mc1": 0.3623011015911873, "mc1_stderr": 0.016826646897262255, "mc2": 0.5257809202982358, "mc2_stderr": 0.015862961076190677 }, "harness|winogrande|5": { "acc": 0.7411207576953434, "acc_stderr": 0.012310515810993376 }, "harness|gsm8k|5": { "acc": 0.2175890826383624, "acc_stderr": 0.011365231761189577 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
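Beyond the single `load_dataset` call shown earlier in this card, the per-task configurations listed in the metadata can be inspected the same way. The snippet below is a minimal sketch, assuming the `datasets` library is installed and the Hugging Face Hub is reachable; the config names `harness_gsm8k_5` and `results` and the `latest` split are taken from the configuration list of this card, while the column inspection is kept generic because the exact schema of the detail files is not documented here.

```python
from datasets import load_dataset

# Load the per-task details for the most recent run; "harness_gsm8k_5" and the
# "latest" split are declared in this card's configuration list.
details = load_dataset(
    "open-llm-leaderboard/details_NeverSleep__Noromaid-13b-v0.2",
    "harness_gsm8k_5",
    split="latest",
)

# The schema of the detail files is not documented in this card, so inspect it
# generically instead of assuming column names.
print(details.column_names)
print(details[0])

# The aggregated metrics live in the separate "results" configuration.
results = load_dataset(
    "open-llm-leaderboard/details_NeverSleep__Noromaid-13b-v0.2",
    "results",
    split="latest",
)
print(results.column_names)
```

Using the "latest" split rather than a timestamped one keeps the snippet valid if the dataset is later regenerated after a newer evaluation run.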
open-llm-leaderboard/details_NeverSleep__Noromaid-13b-v0.2
[ "region:us" ]
2024-01-04T14:03:04+00:00
{"pretty_name": "Evaluation run of NeverSleep/Noromaid-13b-v0.2", "dataset_summary": "Dataset automatically created during the evaluation run of model [NeverSleep/Noromaid-13b-v0.2](https://huggingface.co/NeverSleep/Noromaid-13b-v0.2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NeverSleep__Noromaid-13b-v0.2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T14:00:45.095922](https://huggingface.co/datasets/open-llm-leaderboard/details_NeverSleep__Noromaid-13b-v0.2/blob/main/results_2024-01-04T14-00-45.095922.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.5749604165833158,\n \"acc_stderr\": 0.03350487881102225,\n \"acc_norm\": 0.5817287256802396,\n \"acc_norm_stderr\": 0.03421882022173922,\n \"mc1\": 0.3623011015911873,\n \"mc1_stderr\": 0.016826646897262255,\n \"mc2\": 0.5257809202982358,\n \"mc2_stderr\": 0.015862961076190677\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.591296928327645,\n \"acc_stderr\": 0.014365750345427,\n \"acc_norm\": 0.6092150170648464,\n \"acc_norm_stderr\": 0.01425856388051378\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6501692889862577,\n \"acc_stderr\": 0.004759416464201141,\n \"acc_norm\": 0.8403704441346346,\n \"acc_norm_stderr\": 0.0036551361115537157\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5259259259259259,\n \"acc_stderr\": 0.04313531696750574,\n \"acc_norm\": 0.5259259259259259,\n \"acc_norm_stderr\": 0.04313531696750574\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5723684210526315,\n \"acc_stderr\": 0.04026097083296563,\n \"acc_norm\": 0.5723684210526315,\n \"acc_norm_stderr\": 0.04026097083296563\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.04960449637488583,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.04960449637488583\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5924528301886792,\n \"acc_stderr\": 0.030242233800854494,\n \"acc_norm\": 0.5924528301886792,\n \"acc_norm_stderr\": 0.030242233800854494\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6458333333333334,\n \"acc_stderr\": 0.039994111357535424,\n \"acc_norm\": 0.6458333333333334,\n \"acc_norm_stderr\": 0.039994111357535424\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 
0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5491329479768786,\n \"acc_stderr\": 0.0379401267469703,\n \"acc_norm\": 0.5491329479768786,\n \"acc_norm_stderr\": 0.0379401267469703\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.23529411764705882,\n \"acc_stderr\": 0.04220773659171451,\n \"acc_norm\": 0.23529411764705882,\n \"acc_norm_stderr\": 0.04220773659171451\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.46382978723404256,\n \"acc_stderr\": 0.032600385118357715,\n \"acc_norm\": 0.46382978723404256,\n \"acc_norm_stderr\": 0.032600385118357715\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.044346007015849245,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.044346007015849245\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5103448275862069,\n \"acc_stderr\": 0.04165774775728763,\n \"acc_norm\": 0.5103448275862069,\n \"acc_norm_stderr\": 0.04165774775728763\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.335978835978836,\n \"acc_stderr\": 0.024326310529149138,\n \"acc_norm\": 0.335978835978836,\n \"acc_norm_stderr\": 0.024326310529149138\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.36507936507936506,\n \"acc_stderr\": 0.04306241259127153,\n \"acc_norm\": 0.36507936507936506,\n \"acc_norm_stderr\": 0.04306241259127153\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6838709677419355,\n \"acc_stderr\": 0.026450874489042774,\n \"acc_norm\": 0.6838709677419355,\n \"acc_norm_stderr\": 0.026450874489042774\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.03481904844438804,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.03481904844438804\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.03713158067481913,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.03713158067481913\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7222222222222222,\n \"acc_stderr\": 0.031911782267135466,\n \"acc_norm\": 0.7222222222222222,\n \"acc_norm_stderr\": 0.031911782267135466\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8290155440414507,\n \"acc_stderr\": 0.02717121368316455,\n \"acc_norm\": 0.8290155440414507,\n \"acc_norm_stderr\": 0.02717121368316455\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5461538461538461,\n \"acc_stderr\": 0.02524277098712618,\n \"acc_norm\": 0.5461538461538461,\n \"acc_norm_stderr\": 0.02524277098712618\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35185185185185186,\n \"acc_stderr\": 0.029116617606083018,\n \"acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.029116617606083018\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6008403361344538,\n \"acc_stderr\": 0.03181110032413925,\n \"acc_norm\": 0.6008403361344538,\n \"acc_norm_stderr\": 0.03181110032413925\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3509933774834437,\n \"acc_stderr\": 0.03896981964257375,\n \"acc_norm\": 0.3509933774834437,\n \"acc_norm_stderr\": 0.03896981964257375\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7724770642201835,\n \"acc_stderr\": 0.017974463578776502,\n \"acc_norm\": 0.7724770642201835,\n \"acc_norm_stderr\": 0.017974463578776502\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.41203703703703703,\n \"acc_stderr\": 0.03356787758160835,\n \"acc_norm\": 0.41203703703703703,\n \"acc_norm_stderr\": 0.03356787758160835\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7745098039215687,\n \"acc_stderr\": 0.02933116229425174,\n \"acc_norm\": 0.7745098039215687,\n \"acc_norm_stderr\": 0.02933116229425174\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7805907172995781,\n \"acc_stderr\": 0.026939106581553945,\n \"acc_norm\": 0.7805907172995781,\n \"acc_norm_stderr\": 0.026939106581553945\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.6564885496183206,\n \"acc_stderr\": 0.041649760719448786,\n \"acc_norm\": 0.6564885496183206,\n \"acc_norm_stderr\": 0.041649760719448786\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7603305785123967,\n \"acc_stderr\": 0.03896878985070415,\n \"acc_norm\": 0.7603305785123967,\n \"acc_norm_stderr\": 0.03896878985070415\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7592592592592593,\n \"acc_stderr\": 0.04133119440243838,\n \"acc_norm\": 0.7592592592592593,\n \"acc_norm_stderr\": 0.04133119440243838\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6932515337423313,\n \"acc_stderr\": 0.03623089915724146,\n \"acc_norm\": 0.6932515337423313,\n \"acc_norm_stderr\": 0.03623089915724146\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.375,\n \"acc_stderr\": 0.04595091388086298,\n \"acc_norm\": 0.375,\n \"acc_norm_stderr\": 0.04595091388086298\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280041,\n \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280041\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8034188034188035,\n \"acc_stderr\": 0.02603538609895129,\n \"acc_norm\": 0.8034188034188035,\n \"acc_norm_stderr\": 0.02603538609895129\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7650063856960408,\n 
\"acc_stderr\": 0.015162024152278445,\n \"acc_norm\": 0.7650063856960408,\n \"acc_norm_stderr\": 0.015162024152278445\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6416184971098265,\n \"acc_stderr\": 0.025816756791584187,\n \"acc_norm\": 0.6416184971098265,\n \"acc_norm_stderr\": 0.025816756791584187\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4972067039106145,\n \"acc_stderr\": 0.016722240595491725,\n \"acc_norm\": 0.4972067039106145,\n \"acc_norm_stderr\": 0.016722240595491725\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6372549019607843,\n \"acc_stderr\": 0.0275300784471103,\n \"acc_norm\": 0.6372549019607843,\n \"acc_norm_stderr\": 0.0275300784471103\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6527331189710611,\n \"acc_stderr\": 0.027040745502307336,\n \"acc_norm\": 0.6527331189710611,\n \"acc_norm_stderr\": 0.027040745502307336\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.6481481481481481,\n \"acc_stderr\": 0.026571483480719964,\n \"acc_norm\": 0.6481481481481481,\n \"acc_norm_stderr\": 0.026571483480719964\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4432624113475177,\n \"acc_stderr\": 0.029634838473766,\n \"acc_norm\": 0.4432624113475177,\n \"acc_norm_stderr\": 0.029634838473766\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4452411994784876,\n \"acc_stderr\": 0.012693421303973294,\n \"acc_norm\": 0.4452411994784876,\n \"acc_norm_stderr\": 0.012693421303973294\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.5367647058823529,\n \"acc_stderr\": 0.030290619180485687,\n \"acc_norm\": 0.5367647058823529,\n \"acc_norm_stderr\": 0.030290619180485687\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.565359477124183,\n \"acc_stderr\": 0.02005426920072646,\n \"acc_norm\": 0.565359477124183,\n \"acc_norm_stderr\": 0.02005426920072646\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6454545454545455,\n \"acc_stderr\": 0.045820048415054174,\n \"acc_norm\": 0.6454545454545455,\n \"acc_norm_stderr\": 0.045820048415054174\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6571428571428571,\n \"acc_stderr\": 0.030387262919547724,\n \"acc_norm\": 0.6571428571428571,\n \"acc_norm_stderr\": 0.030387262919547724\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7761194029850746,\n \"acc_stderr\": 0.029475250236017193,\n \"acc_norm\": 0.7761194029850746,\n \"acc_norm_stderr\": 0.029475250236017193\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.83,\n \"acc_stderr\": 0.03775251680686371,\n \"acc_norm\": 0.83,\n \"acc_norm_stderr\": 0.03775251680686371\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.46987951807228917,\n \"acc_stderr\": 0.03885425420866766,\n \"acc_norm\": 0.46987951807228917,\n \"acc_norm_stderr\": 0.03885425420866766\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7894736842105263,\n \"acc_stderr\": 0.03126781714663179,\n \"acc_norm\": 0.7894736842105263,\n \"acc_norm_stderr\": 0.03126781714663179\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.3623011015911873,\n \"mc1_stderr\": 0.016826646897262255,\n \"mc2\": 0.5257809202982358,\n \"mc2_stderr\": 0.015862961076190677\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7411207576953434,\n \"acc_stderr\": 0.012310515810993376\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2175890826383624,\n \"acc_stderr\": 0.011365231761189577\n }\n}\n```", "repo_url": 
"https://huggingface.co/NeverSleep/Noromaid-13b-v0.2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-00-45.095922.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-00-45.095922.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-00-45.095922.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-00-45.095922.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-00-45.095922.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T14_00_45.095922", "path": ["**/details_harness|winogrande|5_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T14-00-45.095922.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T14_00_45.095922", "path": ["results_2024-01-04T14-00-45.095922.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T14-00-45.095922.parquet"]}]}]}
2024-01-04T14:03:28+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of NeverSleep/Noromaid-13b-v0.2 Dataset automatically created during the evaluation run of model NeverSleep/Noromaid-13b-v0.2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (a minimal sketch is included after this entry): ## Latest results These are the latest results from run 2024-01-04T14:00:45.095922 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
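The load call referenced above ("you can for instance do the following") was stripped from this processed copy of the card. A minimal sketch of what it typically looks like, assuming the repository follows the leaderboard's standard details_<org>__<model> naming convention and exposes per-task configurations such as harness_winogrande_5:

```python
# Minimal sketch, not part of the original card.
# Assumption: the details repository for this model is named with the standard
# details_<org>__<model> convention used by the Open LLM Leaderboard.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_NeverSleep__Noromaid-13b-v0.2",
    "harness_winogrande_5",  # any of the 63 per-task configurations works here
    split="train",           # "train" always points to the latest results
)
print(data)
```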
[ "# Dataset Card for Evaluation run of NeverSleep/Noromaid-13b-v0.2\n\n\n\nDataset automatically created during the evaluation run of model NeverSleep/Noromaid-13b-v0.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:00:45.095922(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of NeverSleep/Noromaid-13b-v0.2\n\n\n\nDataset automatically created during the evaluation run of model NeverSleep/Noromaid-13b-v0.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:00:45.095922(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of NeverSleep/Noromaid-13b-v0.2\n\n\n\nDataset automatically created during the evaluation run of model NeverSleep/Noromaid-13b-v0.2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T14:00:45.095922(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
d6c2842536e8cfe9a8cea15b698d2651203d3370
# Dataset Card for Evaluation run of TomGrc/FusionNet_SOLAR <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [TomGrc/FusionNet_SOLAR](https://huggingface.co/TomGrc/FusionNet_SOLAR) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)); a usage sketch for loading it is included after this card. To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TomGrc__FusionNet_SOLAR", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T14:01:01.931708](https://huggingface.co/datasets/open-llm-leaderboard/details_TomGrc__FusionNet_SOLAR/blob/main/results_2024-01-04T14-01-01.931708.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6548220926768411, "acc_stderr": 0.03202607657828081, "acc_norm": 0.6579056734982602, "acc_norm_stderr": 0.03266412065614393, "mc1": 0.5458996328029376, "mc1_stderr": 0.01742959309132351, "mc2": 0.6920929280629541, "mc2_stderr": 0.015511451393532135 }, "harness|arc:challenge|25": { "acc": 0.6911262798634812, "acc_stderr": 0.013501770929344003, "acc_norm": 0.7158703071672355, "acc_norm_stderr": 0.013179442447653886 }, "harness|hellaswag|10": { "acc": 0.716391157140012, "acc_stderr": 0.004498280244494489, "acc_norm": 0.8839872535351524, "acc_norm_stderr": 0.0031958572477049163 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5777777777777777, "acc_stderr": 0.04266763404099582, "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.04266763404099582 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.743421052631579, "acc_stderr": 0.0355418036802569, "acc_norm": 0.743421052631579, "acc_norm_stderr": 0.0355418036802569 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.78, "acc_stderr": 0.04163331998932261, "acc_norm": 0.78, "acc_norm_stderr": 0.04163331998932261 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6754716981132075, "acc_stderr": 0.02881561571343211, "acc_norm": 0.6754716981132075, "acc_norm_stderr": 0.02881561571343211 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7291666666666666, "acc_stderr": 0.03716177437566017, "acc_norm": 0.7291666666666666, "acc_norm_stderr": 0.03716177437566017 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.44, "acc_stderr": 0.049888765156985884, "acc_norm": 0.44, "acc_norm_stderr": 0.049888765156985884 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.38, "acc_stderr": 0.048783173121456316, "acc_norm": 0.38, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6473988439306358, "acc_stderr": 0.03643037168958548, "acc_norm": 0.6473988439306358, "acc_norm_stderr": 0.03643037168958548 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.04858083574266346, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.04858083574266346 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6425531914893617, "acc_stderr": 0.031329417894764254, "acc_norm": 0.6425531914893617, "acc_norm_stderr": 0.031329417894764254 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6206896551724138, "acc_stderr": 0.04043461861916747, "acc_norm": 0.6206896551724138, "acc_norm_stderr": 0.04043461861916747 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.48148148148148145, "acc_stderr": 0.02573364199183898, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.02573364199183898 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.40476190476190477, "acc_stderr": 0.04390259265377562, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.04390259265377562 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7967741935483871, "acc_stderr": 0.022891687984554956, "acc_norm": 0.7967741935483871, "acc_norm_stderr": 0.022891687984554956 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.458128078817734, "acc_stderr": 0.03505630140785741, "acc_norm": 0.458128078817734, "acc_norm_stderr": 0.03505630140785741 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8, "acc_stderr": 0.031234752377721175, "acc_norm": 0.8, "acc_norm_stderr": 0.031234752377721175 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8434343434343434, "acc_stderr": 0.025890520358141454, "acc_norm": 0.8434343434343434, "acc_norm_stderr": 0.025890520358141454 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6948717948717948, "acc_stderr": 0.023346335293325887, "acc_norm": 0.6948717948717948, "acc_norm_stderr": 0.023346335293325887 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3592592592592593, "acc_stderr": 0.029252905927251972, "acc_norm": 0.3592592592592593, "acc_norm_stderr": 0.029252905927251972 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7100840336134454, "acc_stderr": 0.029472485833136094, "acc_norm": 0.7100840336134454, "acc_norm_stderr": 0.029472485833136094 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.39072847682119205, "acc_stderr": 0.039837983066598075, "acc_norm": 
0.39072847682119205, "acc_norm_stderr": 0.039837983066598075 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8330275229357799, "acc_stderr": 0.015990154885073347, "acc_norm": 0.8330275229357799, "acc_norm_stderr": 0.015990154885073347 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5787037037037037, "acc_stderr": 0.03367462138896078, "acc_norm": 0.5787037037037037, "acc_norm_stderr": 0.03367462138896078 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.025524722324553332, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.025524722324553332 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8481012658227848, "acc_stderr": 0.023363878096632446, "acc_norm": 0.8481012658227848, "acc_norm_stderr": 0.023363878096632446 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6860986547085202, "acc_stderr": 0.03114679648297246, "acc_norm": 0.6860986547085202, "acc_norm_stderr": 0.03114679648297246 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.732824427480916, "acc_stderr": 0.038808483010823944, "acc_norm": 0.732824427480916, "acc_norm_stderr": 0.038808483010823944 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.036401182719909456, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.036401182719909456 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252627, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252627 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.6993865030674846, "acc_stderr": 0.03602511318806771, "acc_norm": 0.6993865030674846, "acc_norm_stderr": 0.03602511318806771 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8461538461538461, "acc_stderr": 0.023636873317489277, "acc_norm": 0.8461538461538461, "acc_norm_stderr": 0.023636873317489277 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7943805874840357, "acc_stderr": 0.01445250045678583, "acc_norm": 0.7943805874840357, "acc_norm_stderr": 0.01445250045678583 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7369942196531792, "acc_stderr": 0.02370309952525817, "acc_norm": 0.7369942196531792, "acc_norm_stderr": 0.02370309952525817 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.4424581005586592, "acc_stderr": 0.01661139368726858, "acc_norm": 0.4424581005586592, "acc_norm_stderr": 0.01661139368726858 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7483660130718954, "acc_stderr": 0.024848018263875195, "acc_norm": 0.7483660130718954, "acc_norm_stderr": 0.024848018263875195 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7009646302250804, "acc_stderr": 0.02600330111788514, "acc_norm": 0.7009646302250804, "acc_norm_stderr": 0.02600330111788514 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7746913580246914, "acc_stderr": 0.02324620264781975, "acc_norm": 0.7746913580246914, "acc_norm_stderr": 0.02324620264781975 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.46808510638297873, "acc_stderr": 0.029766675075873866, "acc_norm": 0.46808510638297873, "acc_norm_stderr": 0.029766675075873866 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4869621903520209, "acc_stderr": 0.012765893883835332, "acc_norm": 0.4869621903520209, "acc_norm_stderr": 0.012765893883835332 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7132352941176471, "acc_stderr": 0.027472274473233815, "acc_norm": 0.7132352941176471, "acc_norm_stderr": 0.027472274473233815 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6830065359477124, "acc_stderr": 0.01882421951270621, "acc_norm": 0.6830065359477124, "acc_norm_stderr": 0.01882421951270621 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7090909090909091, "acc_stderr": 0.04350271442923243, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.726530612244898, "acc_stderr": 0.028535560337128445, "acc_norm": 0.726530612244898, "acc_norm_stderr": 0.028535560337128445 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8059701492537313, "acc_stderr": 0.02796267760476892, "acc_norm": 0.8059701492537313, "acc_norm_stderr": 0.02796267760476892 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.88, "acc_stderr": 0.032659863237109066, "acc_norm": 0.88, "acc_norm_stderr": 0.032659863237109066 }, "harness|hendrycksTest-virology|5": { "acc": 0.572289156626506, "acc_stderr": 0.03851597683718533, "acc_norm": 0.572289156626506, "acc_norm_stderr": 0.03851597683718533 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7660818713450293, "acc_stderr": 0.03246721765117826, "acc_norm": 0.7660818713450293, "acc_norm_stderr": 0.03246721765117826 }, "harness|truthfulqa:mc|0": { "mc1": 0.5458996328029376, "mc1_stderr": 0.01742959309132351, "mc2": 0.6920929280629541, "mc2_stderr": 0.015511451393532135 }, "harness|winogrande|5": { "acc": 0.8105761641673244, "acc_stderr": 0.011012790432989245 }, "harness|gsm8k|5": { "acc": 0.5094768764215315, "acc_stderr": 0.01377001065116882 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
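Usage note (a sketch added here, not part of the original card): assuming this repository exposes the same aggregated "results" configuration with a "latest" split as the other details_* repositories in this dump, the aggregated metrics quoted in the "Latest results" section above can be reloaded as follows:

```python
# Sketch of loading the aggregated results; assumes the "results" config and
# its "latest" split exist for this repo, as they do for the sibling repos.
from datasets import load_dataset

results = load_dataset(
    "open-llm-leaderboard/details_TomGrc__FusionNet_SOLAR",
    "results",
    split="latest",
)
# Each row corresponds to one evaluation run; the most recent run holds the
# aggregated accuracies shown in the "Latest results" section of the card.
print(results[0])
```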
open-llm-leaderboard/details_TomGrc__FusionNet_SOLAR
[ "region:us" ]
2024-01-04T14:03:18+00:00
{"pretty_name": "Evaluation run of TomGrc/FusionNet_SOLAR", "dataset_summary": "Dataset automatically created during the evaluation run of model [TomGrc/FusionNet_SOLAR](https://huggingface.co/TomGrc/FusionNet_SOLAR) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TomGrc__FusionNet_SOLAR\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T14:01:01.931708](https://huggingface.co/datasets/open-llm-leaderboard/details_TomGrc__FusionNet_SOLAR/blob/main/results_2024-01-04T14-01-01.931708.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6548220926768411,\n \"acc_stderr\": 0.03202607657828081,\n \"acc_norm\": 0.6579056734982602,\n \"acc_norm_stderr\": 0.03266412065614393,\n \"mc1\": 0.5458996328029376,\n \"mc1_stderr\": 0.01742959309132351,\n \"mc2\": 0.6920929280629541,\n \"mc2_stderr\": 0.015511451393532135\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6911262798634812,\n \"acc_stderr\": 0.013501770929344003,\n \"acc_norm\": 0.7158703071672355,\n \"acc_norm_stderr\": 0.013179442447653886\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.716391157140012,\n \"acc_stderr\": 0.004498280244494489,\n \"acc_norm\": 0.8839872535351524,\n \"acc_norm_stderr\": 0.0031958572477049163\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.5777777777777777,\n \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.743421052631579,\n \"acc_stderr\": 0.0355418036802569,\n \"acc_norm\": 0.743421052631579,\n \"acc_norm_stderr\": 0.0355418036802569\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.04163331998932261,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.04163331998932261\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6754716981132075,\n \"acc_stderr\": 0.02881561571343211,\n \"acc_norm\": 0.6754716981132075,\n \"acc_norm_stderr\": 0.02881561571343211\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7291666666666666,\n \"acc_stderr\": 0.03716177437566017,\n \"acc_norm\": 0.7291666666666666,\n \"acc_norm_stderr\": 0.03716177437566017\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.049888765156985884,\n 
\"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.049888765156985884\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6473988439306358,\n \"acc_stderr\": 0.03643037168958548,\n \"acc_norm\": 0.6473988439306358,\n \"acc_norm_stderr\": 0.03643037168958548\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.04858083574266346,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.04858083574266346\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6425531914893617,\n \"acc_stderr\": 0.031329417894764254,\n \"acc_norm\": 0.6425531914893617,\n \"acc_norm_stderr\": 0.031329417894764254\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6206896551724138,\n \"acc_stderr\": 0.04043461861916747,\n \"acc_norm\": 0.6206896551724138,\n \"acc_norm_stderr\": 0.04043461861916747\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.02573364199183898,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.02573364199183898\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.40476190476190477,\n \"acc_stderr\": 0.04390259265377562,\n \"acc_norm\": 0.40476190476190477,\n \"acc_norm_stderr\": 0.04390259265377562\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7967741935483871,\n \"acc_stderr\": 0.022891687984554956,\n \"acc_norm\": 0.7967741935483871,\n \"acc_norm_stderr\": 0.022891687984554956\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.458128078817734,\n \"acc_stderr\": 0.03505630140785741,\n \"acc_norm\": 0.458128078817734,\n \"acc_norm_stderr\": 0.03505630140785741\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.031234752377721175,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.031234752377721175\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8434343434343434,\n \"acc_stderr\": 0.025890520358141454,\n \"acc_norm\": 0.8434343434343434,\n \"acc_norm_stderr\": 0.025890520358141454\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6948717948717948,\n 
\"acc_stderr\": 0.023346335293325887,\n \"acc_norm\": 0.6948717948717948,\n \"acc_norm_stderr\": 0.023346335293325887\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3592592592592593,\n \"acc_stderr\": 0.029252905927251972,\n \"acc_norm\": 0.3592592592592593,\n \"acc_norm_stderr\": 0.029252905927251972\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7100840336134454,\n \"acc_stderr\": 0.029472485833136094,\n \"acc_norm\": 0.7100840336134454,\n \"acc_norm_stderr\": 0.029472485833136094\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.39072847682119205,\n \"acc_stderr\": 0.039837983066598075,\n \"acc_norm\": 0.39072847682119205,\n \"acc_norm_stderr\": 0.039837983066598075\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8330275229357799,\n \"acc_stderr\": 0.015990154885073347,\n \"acc_norm\": 0.8330275229357799,\n \"acc_norm_stderr\": 0.015990154885073347\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5787037037037037,\n \"acc_stderr\": 0.03367462138896078,\n \"acc_norm\": 0.5787037037037037,\n \"acc_norm_stderr\": 0.03367462138896078\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.025524722324553332,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.025524722324553332\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8481012658227848,\n \"acc_stderr\": 0.023363878096632446,\n \"acc_norm\": 0.8481012658227848,\n \"acc_norm_stderr\": 0.023363878096632446\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6860986547085202,\n \"acc_stderr\": 0.03114679648297246,\n \"acc_norm\": 0.6860986547085202,\n \"acc_norm_stderr\": 0.03114679648297246\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.732824427480916,\n \"acc_stderr\": 0.038808483010823944,\n \"acc_norm\": 0.732824427480916,\n \"acc_norm_stderr\": 0.038808483010823944\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.036401182719909456,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.036401182719909456\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252627,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252627\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.6993865030674846,\n \"acc_stderr\": 0.03602511318806771,\n \"acc_norm\": 0.6993865030674846,\n \"acc_norm_stderr\": 0.03602511318806771\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8461538461538461,\n \"acc_stderr\": 0.023636873317489277,\n \"acc_norm\": 0.8461538461538461,\n \"acc_norm_stderr\": 0.023636873317489277\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.7943805874840357,\n \"acc_stderr\": 0.01445250045678583,\n \"acc_norm\": 
0.7943805874840357,\n \"acc_norm_stderr\": 0.01445250045678583\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7369942196531792,\n \"acc_stderr\": 0.02370309952525817,\n \"acc_norm\": 0.7369942196531792,\n \"acc_norm_stderr\": 0.02370309952525817\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.4424581005586592,\n \"acc_stderr\": 0.01661139368726858,\n \"acc_norm\": 0.4424581005586592,\n \"acc_norm_stderr\": 0.01661139368726858\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7483660130718954,\n \"acc_stderr\": 0.024848018263875195,\n \"acc_norm\": 0.7483660130718954,\n \"acc_norm_stderr\": 0.024848018263875195\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7009646302250804,\n \"acc_stderr\": 0.02600330111788514,\n \"acc_norm\": 0.7009646302250804,\n \"acc_norm_stderr\": 0.02600330111788514\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7746913580246914,\n \"acc_stderr\": 0.02324620264781975,\n \"acc_norm\": 0.7746913580246914,\n \"acc_norm_stderr\": 0.02324620264781975\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.46808510638297873,\n \"acc_stderr\": 0.029766675075873866,\n \"acc_norm\": 0.46808510638297873,\n \"acc_norm_stderr\": 0.029766675075873866\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4869621903520209,\n \"acc_stderr\": 0.012765893883835332,\n \"acc_norm\": 0.4869621903520209,\n \"acc_norm_stderr\": 0.012765893883835332\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7132352941176471,\n \"acc_stderr\": 0.027472274473233815,\n \"acc_norm\": 0.7132352941176471,\n \"acc_norm_stderr\": 0.027472274473233815\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6830065359477124,\n \"acc_stderr\": 0.01882421951270621,\n \"acc_norm\": 0.6830065359477124,\n \"acc_norm_stderr\": 0.01882421951270621\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.726530612244898,\n \"acc_stderr\": 0.028535560337128445,\n \"acc_norm\": 0.726530612244898,\n \"acc_norm_stderr\": 0.028535560337128445\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8059701492537313,\n \"acc_stderr\": 0.02796267760476892,\n \"acc_norm\": 0.8059701492537313,\n \"acc_norm_stderr\": 0.02796267760476892\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.88,\n \"acc_stderr\": 0.032659863237109066,\n \"acc_norm\": 0.88,\n \"acc_norm_stderr\": 0.032659863237109066\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.572289156626506,\n \"acc_stderr\": 0.03851597683718533,\n \"acc_norm\": 0.572289156626506,\n \"acc_norm_stderr\": 0.03851597683718533\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7660818713450293,\n \"acc_stderr\": 0.03246721765117826,\n \"acc_norm\": 0.7660818713450293,\n \"acc_norm_stderr\": 0.03246721765117826\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5458996328029376,\n \"mc1_stderr\": 0.01742959309132351,\n \"mc2\": 0.6920929280629541,\n \"mc2_stderr\": 0.015511451393532135\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8105761641673244,\n \"acc_stderr\": 0.011012790432989245\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.5094768764215315,\n \"acc_stderr\": 0.01377001065116882\n }\n}\n```", "repo_url": "https://huggingface.co/TomGrc/FusionNet_SOLAR", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-01-01.931708.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-01-01.931708.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-01-01.931708.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-01-01.931708.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-01-01.931708.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-01-01.931708.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["**/details_harness|winogrande|5_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T14-01-01.931708.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T14_01_01.931708", "path": ["results_2024-01-04T14-01-01.931708.parquet"]}, {"split": "latest", "path": 
["results_2024-01-04T14-01-01.931708.parquet"]}]}]}
2024-01-04T14:03:43+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TomGrc/FusionNet_SOLAR Dataset automatically created during the evaluation run of model TomGrc/FusionNet_SOLAR on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T14:01:01.931708 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
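A minimal sketch of the loading call referenced in the card text above, assuming the details repository follows the leaderboard's usual `details_<org>__<model>` naming convention (i.e. `open-llm-leaderboard/details_TomGrc__FusionNet_SOLAR`) and using the `harness_winogrande_5` configuration listed in the metadata:

```python
from datasets import load_dataset

# Repository id assumed from the leaderboard's details_<org>__<model> convention.
data = load_dataset(
    "open-llm-leaderboard/details_TomGrc__FusionNet_SOLAR",
    "harness_winogrande_5",  # any config_name from the metadata above works here
    split="train",           # "train" always points to the latest results
)
```

Any other `config_name` from the metadata (for example `results`, which holds the aggregated metrics) can be passed instead to load that configuration's details.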
[ "# Dataset Card for Evaluation run of TomGrc/FusionNet_SOLAR\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FusionNet_SOLAR on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:01:01.931708(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TomGrc/FusionNet_SOLAR\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FusionNet_SOLAR on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:01:01.931708(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 181, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TomGrc/FusionNet_SOLAR\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FusionNet_SOLAR on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T14:01:01.931708(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
1271f77d99c1f638ef34567e9437c2bc7ef0f7b2
# Dataset Card for Evaluation run of bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED](https://huggingface.co/bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_bn22__Nous-Hermes-2-SOLAR-10.7B-MISALIGNED", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T14:01:10.806510](https://huggingface.co/datasets/open-llm-leaderboard/details_bn22__Nous-Hermes-2-SOLAR-10.7B-MISALIGNED/blob/main/results_2024-01-04T14-01-10.806510.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6656100560822746, "acc_stderr": 0.0314404179280337, "acc_norm": 0.666318457955101, "acc_norm_stderr": 0.03208487950716331, "mc1": 0.40758873929008566, "mc1_stderr": 0.017201949234553107, "mc2": 0.5779007093743166, "mc2_stderr": 0.015366654030614126 }, "harness|arc:challenge|25": { "acc": 0.6382252559726962, "acc_stderr": 0.014041957945038076, "acc_norm": 0.6825938566552902, "acc_norm_stderr": 0.013602239088038167 }, "harness|hellaswag|10": { "acc": 0.6691894045010954, "acc_stderr": 0.004695434103958515, "acc_norm": 0.8610834495120494, "acc_norm_stderr": 0.003451525868724678 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5703703703703704, "acc_stderr": 0.04276349494376599, "acc_norm": 0.5703703703703704, "acc_norm_stderr": 0.04276349494376599 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7697368421052632, "acc_stderr": 0.03426059424403165, "acc_norm": 0.7697368421052632, "acc_norm_stderr": 0.03426059424403165 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.690566037735849, "acc_stderr": 0.02845015479411864, "acc_norm": 0.690566037735849, "acc_norm_stderr": 0.02845015479411864 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7291666666666666, "acc_stderr": 0.03716177437566017, "acc_norm": 0.7291666666666666, "acc_norm_stderr": 0.03716177437566017 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.42, "acc_stderr": 
0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6416184971098265, "acc_stderr": 0.03656343653353159, "acc_norm": 0.6416184971098265, "acc_norm_stderr": 0.03656343653353159 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.04408440022768079, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6042553191489362, "acc_stderr": 0.03196758697835362, "acc_norm": 0.6042553191489362, "acc_norm_stderr": 0.03196758697835362 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5526315789473685, "acc_stderr": 0.04677473004491199, "acc_norm": 0.5526315789473685, "acc_norm_stderr": 0.04677473004491199 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5724137931034483, "acc_stderr": 0.04122737111370333, "acc_norm": 0.5724137931034483, "acc_norm_stderr": 0.04122737111370333 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.46825396825396826, "acc_stderr": 0.0256993528321318, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.0256993528321318 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.46825396825396826, "acc_stderr": 0.04463112720677171, "acc_norm": 0.46825396825396826, "acc_norm_stderr": 0.04463112720677171 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8, "acc_stderr": 0.02275520495954294, "acc_norm": 0.8, "acc_norm_stderr": 0.02275520495954294 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.541871921182266, "acc_stderr": 0.03505630140785741, "acc_norm": 0.541871921182266, "acc_norm_stderr": 0.03505630140785741 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8363636363636363, "acc_stderr": 0.02888787239548795, "acc_norm": 0.8363636363636363, "acc_norm_stderr": 0.02888787239548795 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8737373737373737, "acc_stderr": 0.02366435940288022, "acc_norm": 0.8737373737373737, "acc_norm_stderr": 0.02366435940288022 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6692307692307692, "acc_stderr": 0.02385479568097114, "acc_norm": 0.6692307692307692, "acc_norm_stderr": 0.02385479568097114 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35185185185185186, "acc_stderr": 0.02911661760608303, "acc_norm": 0.35185185185185186, "acc_norm_stderr": 0.02911661760608303 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.030066761582977934, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.030066761582977934 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8513761467889909, "acc_stderr": 0.015251253773660834, "acc_norm": 0.8513761467889909, "acc_norm_stderr": 0.015251253773660834 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5277777777777778, "acc_stderr": 0.0340470532865388, "acc_norm": 0.5277777777777778, "acc_norm_stderr": 0.0340470532865388 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.0251956584289318, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.0251956584289318 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.869198312236287, "acc_stderr": 0.021948766059470767, "acc_norm": 0.869198312236287, "acc_norm_stderr": 0.021948766059470767 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.7443946188340808, "acc_stderr": 0.029275891003969923, "acc_norm": 0.7443946188340808, "acc_norm_stderr": 0.029275891003969923 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7786259541984732, "acc_stderr": 0.03641297081313729, "acc_norm": 0.7786259541984732, "acc_norm_stderr": 0.03641297081313729 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8181818181818182, "acc_stderr": 0.03520893951097653, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.03520893951097653 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.040191074725573483, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.040191074725573483 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7484662576687117, "acc_stderr": 0.03408997886857529, "acc_norm": 0.7484662576687117, "acc_norm_stderr": 0.03408997886857529 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.49107142857142855, "acc_stderr": 0.04745033255489123, "acc_norm": 0.49107142857142855, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8888888888888888, "acc_stderr": 0.020588491316092365, "acc_norm": 0.8888888888888888, "acc_norm_stderr": 0.020588491316092365 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8263090676883781, "acc_stderr": 0.013547415658662252, "acc_norm": 0.8263090676883781, "acc_norm_stderr": 0.013547415658662252 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.358659217877095, "acc_stderr": 0.016040454426164478, "acc_norm": 0.358659217877095, "acc_norm_stderr": 0.016040454426164478 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7777777777777778, "acc_stderr": 0.023805186524888135, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.023805186524888135 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7138263665594855, "acc_stderr": 0.02567025924218894, "acc_norm": 0.7138263665594855, "acc_norm_stderr": 0.02567025924218894 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7716049382716049, "acc_stderr": 0.023358211840626267, "acc_norm": 0.7716049382716049, "acc_norm_stderr": 
0.023358211840626267 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.5212765957446809, "acc_stderr": 0.029800481645628693, "acc_norm": 0.5212765957446809, "acc_norm_stderr": 0.029800481645628693 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.5026075619295959, "acc_stderr": 0.012770062445433166, "acc_norm": 0.5026075619295959, "acc_norm_stderr": 0.012770062445433166 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7720588235294118, "acc_stderr": 0.0254830814680298, "acc_norm": 0.7720588235294118, "acc_norm_stderr": 0.0254830814680298 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6830065359477124, "acc_stderr": 0.018824219512706207, "acc_norm": 0.6830065359477124, "acc_norm_stderr": 0.018824219512706207 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7090909090909091, "acc_stderr": 0.04350271442923243, "acc_norm": 0.7090909090909091, "acc_norm_stderr": 0.04350271442923243 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.8, "acc_stderr": 0.025607375986579157, "acc_norm": 0.8, "acc_norm_stderr": 0.025607375986579157 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421603, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421603 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.91, "acc_stderr": 0.028762349126466108, "acc_norm": 0.91, "acc_norm_stderr": 0.028762349126466108 }, "harness|hendrycksTest-virology|5": { "acc": 0.5662650602409639, "acc_stderr": 0.03858158940685515, "acc_norm": 0.5662650602409639, "acc_norm_stderr": 0.03858158940685515 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.40758873929008566, "mc1_stderr": 0.017201949234553107, "mc2": 0.5779007093743166, "mc2_stderr": 0.015366654030614126 }, "harness|winogrande|5": { "acc": 0.8342541436464088, "acc_stderr": 0.010450899545370656 }, "harness|gsm8k|5": { "acc": 0.6914329037149356, "acc_stderr": 0.0127230760498159 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_bn22__Nous-Hermes-2-SOLAR-10.7B-MISALIGNED
[ "region:us" ]
2024-01-04T14:03:28+00:00
{"pretty_name": "Evaluation run of bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED", "dataset_summary": "Dataset automatically created during the evaluation run of model [bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED](https://huggingface.co/bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_bn22__Nous-Hermes-2-SOLAR-10.7B-MISALIGNED\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T14:01:10.806510](https://huggingface.co/datasets/open-llm-leaderboard/details_bn22__Nous-Hermes-2-SOLAR-10.7B-MISALIGNED/blob/main/results_2024-01-04T14-01-10.806510.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6656100560822746,\n \"acc_stderr\": 0.0314404179280337,\n \"acc_norm\": 0.666318457955101,\n \"acc_norm_stderr\": 0.03208487950716331,\n \"mc1\": 0.40758873929008566,\n \"mc1_stderr\": 0.017201949234553107,\n \"mc2\": 0.5779007093743166,\n \"mc2_stderr\": 0.015366654030614126\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6382252559726962,\n \"acc_stderr\": 0.014041957945038076,\n \"acc_norm\": 0.6825938566552902,\n \"acc_norm_stderr\": 0.013602239088038167\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6691894045010954,\n \"acc_stderr\": 0.004695434103958515,\n \"acc_norm\": 0.8610834495120494,\n \"acc_norm_stderr\": 0.003451525868724678\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5703703703703704,\n \"acc_stderr\": 0.04276349494376599,\n \"acc_norm\": 0.5703703703703704,\n \"acc_norm_stderr\": 0.04276349494376599\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7697368421052632,\n \"acc_stderr\": 0.03426059424403165,\n \"acc_norm\": 0.7697368421052632,\n \"acc_norm_stderr\": 0.03426059424403165\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.690566037735849,\n \"acc_stderr\": 0.02845015479411864,\n \"acc_norm\": 0.690566037735849,\n \"acc_norm_stderr\": 0.02845015479411864\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7291666666666666,\n \"acc_stderr\": 0.03716177437566017,\n \"acc_norm\": 0.7291666666666666,\n \"acc_norm_stderr\": 0.03716177437566017\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6416184971098265,\n \"acc_stderr\": 0.03656343653353159,\n \"acc_norm\": 0.6416184971098265,\n \"acc_norm_stderr\": 0.03656343653353159\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6042553191489362,\n \"acc_stderr\": 0.03196758697835362,\n \"acc_norm\": 0.6042553191489362,\n \"acc_norm_stderr\": 0.03196758697835362\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5526315789473685,\n \"acc_stderr\": 0.04677473004491199,\n \"acc_norm\": 0.5526315789473685,\n \"acc_norm_stderr\": 0.04677473004491199\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5724137931034483,\n \"acc_stderr\": 0.04122737111370333,\n \"acc_norm\": 0.5724137931034483,\n \"acc_norm_stderr\": 0.04122737111370333\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.0256993528321318,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.0256993528321318\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.46825396825396826,\n \"acc_stderr\": 0.04463112720677171,\n \"acc_norm\": 0.46825396825396826,\n \"acc_norm_stderr\": 0.04463112720677171\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.02275520495954294,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.02275520495954294\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.541871921182266,\n \"acc_stderr\": 0.03505630140785741,\n \"acc_norm\": 0.541871921182266,\n \"acc_norm_stderr\": 0.03505630140785741\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8363636363636363,\n \"acc_stderr\": 0.02888787239548795,\n \"acc_norm\": 0.8363636363636363,\n \"acc_norm_stderr\": 0.02888787239548795\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8737373737373737,\n \"acc_stderr\": 0.02366435940288022,\n \"acc_norm\": 0.8737373737373737,\n \"acc_norm_stderr\": 0.02366435940288022\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6692307692307692,\n \"acc_stderr\": 0.02385479568097114,\n \"acc_norm\": 0.6692307692307692,\n \"acc_norm_stderr\": 0.02385479568097114\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35185185185185186,\n \"acc_stderr\": 0.02911661760608303,\n \"acc_norm\": 0.35185185185185186,\n \"acc_norm_stderr\": 0.02911661760608303\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6890756302521008,\n \"acc_stderr\": 0.030066761582977934,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.030066761582977934\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8513761467889909,\n \"acc_stderr\": 0.015251253773660834,\n \"acc_norm\": 0.8513761467889909,\n \"acc_norm_stderr\": 0.015251253773660834\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5277777777777778,\n \"acc_stderr\": 0.0340470532865388,\n \"acc_norm\": 0.5277777777777778,\n \"acc_norm_stderr\": 0.0340470532865388\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.0251956584289318,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.0251956584289318\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.869198312236287,\n \"acc_stderr\": 0.021948766059470767,\n \"acc_norm\": 0.869198312236287,\n \"acc_norm_stderr\": 0.021948766059470767\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.7443946188340808,\n \"acc_stderr\": 0.029275891003969923,\n \"acc_norm\": 0.7443946188340808,\n \"acc_norm_stderr\": 0.029275891003969923\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7786259541984732,\n \"acc_stderr\": 0.03641297081313729,\n \"acc_norm\": 0.7786259541984732,\n \"acc_norm_stderr\": 0.03641297081313729\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.03520893951097653,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.03520893951097653\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.040191074725573483,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.040191074725573483\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7484662576687117,\n \"acc_stderr\": 0.03408997886857529,\n \"acc_norm\": 0.7484662576687117,\n \"acc_norm_stderr\": 0.03408997886857529\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.49107142857142855,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.49107142857142855,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8888888888888888,\n \"acc_stderr\": 0.020588491316092365,\n \"acc_norm\": 0.8888888888888888,\n \"acc_norm_stderr\": 0.020588491316092365\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8263090676883781,\n \"acc_stderr\": 0.013547415658662252,\n \"acc_norm\": 0.8263090676883781,\n \"acc_norm_stderr\": 0.013547415658662252\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.358659217877095,\n \"acc_stderr\": 0.016040454426164478,\n \"acc_norm\": 0.358659217877095,\n \"acc_norm_stderr\": 0.016040454426164478\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.023805186524888135,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.023805186524888135\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7138263665594855,\n \"acc_stderr\": 0.02567025924218894,\n \"acc_norm\": 0.7138263665594855,\n \"acc_norm_stderr\": 0.02567025924218894\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7716049382716049,\n \"acc_stderr\": 0.023358211840626267,\n \"acc_norm\": 0.7716049382716049,\n \"acc_norm_stderr\": 0.023358211840626267\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5212765957446809,\n \"acc_stderr\": 0.029800481645628693,\n \"acc_norm\": 0.5212765957446809,\n \"acc_norm_stderr\": 0.029800481645628693\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.5026075619295959,\n \"acc_stderr\": 0.012770062445433166,\n \"acc_norm\": 0.5026075619295959,\n \"acc_norm_stderr\": 0.012770062445433166\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7720588235294118,\n \"acc_stderr\": 0.0254830814680298,\n \"acc_norm\": 0.7720588235294118,\n \"acc_norm_stderr\": 0.0254830814680298\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6830065359477124,\n \"acc_stderr\": 0.018824219512706207,\n \"acc_norm\": 0.6830065359477124,\n \"acc_norm_stderr\": 0.018824219512706207\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7090909090909091,\n \"acc_stderr\": 0.04350271442923243,\n \"acc_norm\": 0.7090909090909091,\n \"acc_norm_stderr\": 0.04350271442923243\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.025607375986579157,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.025607375986579157\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.028762349126466108,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.028762349126466108\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5662650602409639,\n \"acc_stderr\": 0.03858158940685515,\n \"acc_norm\": 0.5662650602409639,\n \"acc_norm_stderr\": 0.03858158940685515\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.40758873929008566,\n \"mc1_stderr\": 0.017201949234553107,\n \"mc2\": 0.5779007093743166,\n \"mc2_stderr\": 0.015366654030614126\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8342541436464088,\n \"acc_stderr\": 0.010450899545370656\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6914329037149356,\n \"acc_stderr\": 0.0127230760498159\n }\n}\n```", "repo_url": 
"https://huggingface.co/bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-01-10.806510.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-01-10.806510.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-01-10.806510.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-01-10.806510.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-01-10.806510.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T14_01_10.806510", "path": ["**/details_harness|winogrande|5_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T14-01-10.806510.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T14_01_10.806510", "path": ["results_2024-01-04T14-01-10.806510.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T14-01-10.806510.parquet"]}]}]}
2024-01-04T14:03:51+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED Dataset automatically created during the evaluation run of model bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following (see the loading sketch right after this card text): ## Latest results These are the latest results from run 2024-01-04T14:01:10.806510 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
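The flattened card text above ends the sentence "To load the details from a run, you can for instance do the following" without the snippet that originally followed it; the code block was stripped when the text was flattened. A minimal sketch is given below. The `harness_winogrande_5` configuration and the `latest` split come from this record's config metadata, while the repository id `open-llm-leaderboard/details_bn22__Nous-Hermes-2-SOLAR-10.7B-MISALIGNED` is an assumption inferred from the `details_<org>__<model>` naming pattern used by other records in this dump.

```python
from datasets import load_dataset

# Assumed repository id, inferred from the details_<org>__<model> naming pattern.
REPO_ID = "open-llm-leaderboard/details_bn22__Nous-Hermes-2-SOLAR-10.7B-MISALIGNED"

# Load one evaluation configuration; per the config metadata in this record,
# the "latest" split points to the most recent evaluation run.
data = load_dataset(REPO_ID, "harness_winogrande_5", split="latest")

print(data)
```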
[ "# Dataset Card for Evaluation run of bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED\n\n\n\nDataset automatically created during the evaluation run of model bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:01:10.806510(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED\n\n\n\nDataset automatically created during the evaluation run of model bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:01:10.806510(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 205, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED\n\n\n\nDataset automatically created during the evaluation run of model bn22/Nous-Hermes-2-SOLAR-10.7B-MISALIGNED on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T14:01:10.806510(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]" ]
89e093a30a24353fc0ffc0ace33e650df4129eb8
# Dataset Card for Evaluation run of Walmart-the-bag/WordWoven-13B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [Walmart-the-bag/WordWoven-13B](https://huggingface.co/Walmart-the-bag/WordWoven-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_Walmart-the-bag__WordWoven-13B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T14:04:01.998645](https://huggingface.co/datasets/open-llm-leaderboard/details_Walmart-the-bag__WordWoven-13B/blob/main/results_2024-01-04T14-04-01.998645.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6426229908497888, "acc_stderr": 0.032194876651038366, "acc_norm": 0.6446818377157361, "acc_norm_stderr": 0.032837192713734226, "mc1": 0.37576499388004897, "mc1_stderr": 0.016954584060214297, "mc2": 0.5445170161036526, "mc2_stderr": 0.015492435025282279 }, "harness|arc:challenge|25": { "acc": 0.6228668941979523, "acc_stderr": 0.014163366896192603, "acc_norm": 0.6612627986348123, "acc_norm_stderr": 0.01383056892797433 }, "harness|hellaswag|10": { "acc": 0.6697868950408286, "acc_stderr": 0.004693285694663838, "acc_norm": 0.8580959968133838, "acc_norm_stderr": 0.003482384956632779 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.39, "acc_stderr": 0.04902071300001975, "acc_norm": 0.39, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6222222222222222, "acc_stderr": 0.04188307537595853, "acc_norm": 0.6222222222222222, "acc_norm_stderr": 0.04188307537595853 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6973684210526315, "acc_stderr": 0.037385206761196686, "acc_norm": 0.6973684210526315, "acc_norm_stderr": 0.037385206761196686 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6943396226415094, "acc_stderr": 0.028353298073322666, "acc_norm": 0.6943396226415094, "acc_norm_stderr": 0.028353298073322666 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7152777777777778, "acc_stderr": 0.037738099906869334, "acc_norm": 0.7152777777777778, "acc_norm_stderr": 0.037738099906869334 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.05016135580465919, "acc_norm": 0.47, "acc_norm_stderr": 0.05016135580465919 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 
0.05024183937956912 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.3, "acc_stderr": 0.046056618647183814, "acc_norm": 0.3, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.653179190751445, "acc_stderr": 0.036291466701596636, "acc_norm": 0.653179190751445, "acc_norm_stderr": 0.036291466701596636 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.8, "acc_stderr": 0.04020151261036846, "acc_norm": 0.8, "acc_norm_stderr": 0.04020151261036846 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.574468085106383, "acc_stderr": 0.03232146916224469, "acc_norm": 0.574468085106383, "acc_norm_stderr": 0.03232146916224469 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5655172413793104, "acc_stderr": 0.04130740879555497, "acc_norm": 0.5655172413793104, "acc_norm_stderr": 0.04130740879555497 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3968253968253968, "acc_stderr": 0.025197101074246487, "acc_norm": 0.3968253968253968, "acc_norm_stderr": 0.025197101074246487 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04426266681379909, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04426266681379909 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7774193548387097, "acc_stderr": 0.023664216671642518, "acc_norm": 0.7774193548387097, "acc_norm_stderr": 0.023664216671642518 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621505, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621505 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7757575757575758, "acc_stderr": 0.03256866661681102, "acc_norm": 0.7757575757575758, "acc_norm_stderr": 0.03256866661681102 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7929292929292929, "acc_stderr": 0.028869778460267042, "acc_norm": 0.7929292929292929, "acc_norm_stderr": 0.028869778460267042 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9067357512953368, "acc_stderr": 0.020986854593289733, "acc_norm": 0.9067357512953368, "acc_norm_stderr": 0.020986854593289733 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6487179487179487, "acc_stderr": 0.024203665177902796, "acc_norm": 0.6487179487179487, "acc_norm_stderr": 0.024203665177902796 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.02857834836547308, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.02857834836547308 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6638655462184874, "acc_stderr": 0.03068473711513536, "acc_norm": 0.6638655462184874, "acc_norm_stderr": 0.03068473711513536 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, 
"acc_stderr": 0.03943966699183629, "acc_norm": 0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8311926605504587, "acc_stderr": 0.016060056268530343, "acc_norm": 0.8311926605504587, "acc_norm_stderr": 0.016060056268530343 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.47685185185185186, "acc_stderr": 0.03406315360711507, "acc_norm": 0.47685185185185186, "acc_norm_stderr": 0.03406315360711507 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7990196078431373, "acc_stderr": 0.028125972265654373, "acc_norm": 0.7990196078431373, "acc_norm_stderr": 0.028125972265654373 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8185654008438819, "acc_stderr": 0.02508596114457966, "acc_norm": 0.8185654008438819, "acc_norm_stderr": 0.02508596114457966 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6995515695067265, "acc_stderr": 0.030769352008229146, "acc_norm": 0.6995515695067265, "acc_norm_stderr": 0.030769352008229146 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7633587786259542, "acc_stderr": 0.03727673575596914, "acc_norm": 0.7633587786259542, "acc_norm_stderr": 0.03727673575596914 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8016528925619835, "acc_stderr": 0.03640118271990946, "acc_norm": 0.8016528925619835, "acc_norm_stderr": 0.03640118271990946 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.0401910747255735, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7914110429447853, "acc_stderr": 0.031921934489347235, "acc_norm": 0.7914110429447853, "acc_norm_stderr": 0.031921934489347235 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5178571428571429, "acc_stderr": 0.047427623612430116, "acc_norm": 0.5178571428571429, "acc_norm_stderr": 0.047427623612430116 }, "harness|hendrycksTest-management|5": { "acc": 0.7766990291262136, "acc_stderr": 0.04123553189891431, "acc_norm": 0.7766990291262136, "acc_norm_stderr": 0.04123553189891431 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8632478632478633, "acc_stderr": 0.022509033937077802, "acc_norm": 0.8632478632478633, "acc_norm_stderr": 0.022509033937077802 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8173690932311622, "acc_stderr": 0.013816335389973136, "acc_norm": 0.8173690932311622, "acc_norm_stderr": 0.013816335389973136 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7398843930635838, "acc_stderr": 0.023618678310069356, "acc_norm": 0.7398843930635838, "acc_norm_stderr": 0.023618678310069356 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.3564245810055866, "acc_stderr": 0.016018239710513405, "acc_norm": 0.3564245810055866, "acc_norm_stderr": 0.016018239710513405 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7418300653594772, "acc_stderr": 0.025058503316958147, "acc_norm": 0.7418300653594772, "acc_norm_stderr": 0.025058503316958147 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7376543209876543, "acc_stderr": 0.024477222856135114, "acc_norm": 0.7376543209876543, "acc_norm_stderr": 0.024477222856135114 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.45390070921985815, "acc_stderr": 0.029700453247291474, "acc_norm": 0.45390070921985815, "acc_norm_stderr": 0.029700453247291474 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4706649282920469, "acc_stderr": 0.01274823839736555, "acc_norm": 0.4706649282920469, "acc_norm_stderr": 0.01274823839736555 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6617647058823529, "acc_stderr": 0.028739328513983572, "acc_norm": 0.6617647058823529, "acc_norm_stderr": 0.028739328513983572 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6797385620915033, "acc_stderr": 0.018875682938069443, "acc_norm": 0.6797385620915033, "acc_norm_stderr": 0.018875682938069443 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6363636363636364, "acc_stderr": 0.046075820907199756, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.046075820907199756 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7346938775510204, "acc_stderr": 0.028263889943784586, "acc_norm": 0.7346938775510204, "acc_norm_stderr": 0.028263889943784586 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.02587064676616913, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.02587064676616913 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.87, "acc_stderr": 0.033799766898963086, "acc_norm": 0.87, "acc_norm_stderr": 0.033799766898963086 }, "harness|hendrycksTest-virology|5": { "acc": 0.5421686746987951, "acc_stderr": 0.0387862677100236, "acc_norm": 0.5421686746987951, "acc_norm_stderr": 0.0387862677100236 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8187134502923976, "acc_stderr": 0.029547741687640044, "acc_norm": 0.8187134502923976, "acc_norm_stderr": 0.029547741687640044 }, "harness|truthfulqa:mc|0": { "mc1": 0.37576499388004897, "mc1_stderr": 0.016954584060214297, "mc2": 0.5445170161036526, "mc2_stderr": 0.015492435025282279 }, "harness|winogrande|5": { "acc": 0.7892659826361483, "acc_stderr": 0.011462046419710686 }, "harness|gsm8k|5": { "acc": 0.601213040181956, "acc_stderr": 0.013487360477060832 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
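The card above explains that an additional "results" configuration stores the aggregated metrics of the run. As a small illustration, the sketch below loads that configuration and prints the aggregated metrics. The repository id is taken from the card's own loading example; the `latest` split name follows the convention used in the config metadata elsewhere in this dump (the card prose also mentions a "train" split, so treat the split name as an assumption to verify).

```python
from datasets import load_dataset

# Aggregated metrics for the run are stored in the "results" configuration.
results = load_dataset(
    "open-llm-leaderboard/details_Walmart-the-bag__WordWoven-13B",
    "results",
    split="latest",  # assumed split name; the card prose also refers to "train"
)

# One row per evaluation run; each row carries the serialized metric values.
print(results[0])
```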
open-llm-leaderboard/details_Walmart-the-bag__WordWoven-13B
[ "region:us" ]
2024-01-04T14:06:22+00:00
{"pretty_name": "Evaluation run of Walmart-the-bag/WordWoven-13B", "dataset_summary": "Dataset automatically created during the evaluation run of model [Walmart-the-bag/WordWoven-13B](https://huggingface.co/Walmart-the-bag/WordWoven-13B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_Walmart-the-bag__WordWoven-13B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T14:04:01.998645](https://huggingface.co/datasets/open-llm-leaderboard/details_Walmart-the-bag__WordWoven-13B/blob/main/results_2024-01-04T14-04-01.998645.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6426229908497888,\n \"acc_stderr\": 0.032194876651038366,\n \"acc_norm\": 0.6446818377157361,\n \"acc_norm_stderr\": 0.032837192713734226,\n \"mc1\": 0.37576499388004897,\n \"mc1_stderr\": 0.016954584060214297,\n \"mc2\": 0.5445170161036526,\n \"mc2_stderr\": 0.015492435025282279\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6228668941979523,\n \"acc_stderr\": 0.014163366896192603,\n \"acc_norm\": 0.6612627986348123,\n \"acc_norm_stderr\": 0.01383056892797433\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6697868950408286,\n \"acc_stderr\": 0.004693285694663838,\n \"acc_norm\": 0.8580959968133838,\n \"acc_norm_stderr\": 0.003482384956632779\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.39,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.39,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n \"acc_stderr\": 0.04188307537595853,\n \"acc_norm\": 0.6222222222222222,\n \"acc_norm_stderr\": 0.04188307537595853\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6973684210526315,\n \"acc_stderr\": 0.037385206761196686,\n \"acc_norm\": 0.6973684210526315,\n \"acc_norm_stderr\": 0.037385206761196686\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6943396226415094,\n \"acc_stderr\": 0.028353298073322666,\n \"acc_norm\": 0.6943396226415094,\n \"acc_norm_stderr\": 0.028353298073322666\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7152777777777778,\n \"acc_stderr\": 0.037738099906869334,\n \"acc_norm\": 0.7152777777777778,\n \"acc_norm_stderr\": 0.037738099906869334\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n 
\"acc_stderr\": 0.05016135580465919,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.05016135580465919\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.3,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.3,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.653179190751445,\n \"acc_stderr\": 0.036291466701596636,\n \"acc_norm\": 0.653179190751445,\n \"acc_norm_stderr\": 0.036291466701596636\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.8,\n \"acc_stderr\": 0.04020151261036846,\n \"acc_norm\": 0.8,\n \"acc_norm_stderr\": 0.04020151261036846\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.574468085106383,\n \"acc_stderr\": 0.03232146916224469,\n \"acc_norm\": 0.574468085106383,\n \"acc_norm_stderr\": 0.03232146916224469\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5655172413793104,\n \"acc_stderr\": 0.04130740879555497,\n \"acc_norm\": 0.5655172413793104,\n \"acc_norm_stderr\": 0.04130740879555497\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3968253968253968,\n \"acc_stderr\": 0.025197101074246487,\n \"acc_norm\": 0.3968253968253968,\n \"acc_norm_stderr\": 0.025197101074246487\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04426266681379909,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04426266681379909\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7774193548387097,\n \"acc_stderr\": 0.023664216671642518,\n \"acc_norm\": 0.7774193548387097,\n \"acc_norm_stderr\": 0.023664216671642518\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621505,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621505\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7757575757575758,\n \"acc_stderr\": 0.03256866661681102,\n \"acc_norm\": 0.7757575757575758,\n \"acc_norm_stderr\": 0.03256866661681102\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7929292929292929,\n \"acc_stderr\": 0.028869778460267042,\n \"acc_norm\": 0.7929292929292929,\n \"acc_norm_stderr\": 0.028869778460267042\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9067357512953368,\n \"acc_stderr\": 0.020986854593289733,\n \"acc_norm\": 0.9067357512953368,\n \"acc_norm_stderr\": 0.020986854593289733\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6487179487179487,\n \"acc_stderr\": 0.024203665177902796,\n \"acc_norm\": 0.6487179487179487,\n \"acc_norm_stderr\": 0.024203665177902796\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32592592592592595,\n \"acc_stderr\": 0.02857834836547308,\n \"acc_norm\": 0.32592592592592595,\n \"acc_norm_stderr\": 0.02857834836547308\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6638655462184874,\n \"acc_stderr\": 0.03068473711513536,\n \"acc_norm\": 0.6638655462184874,\n \"acc_norm_stderr\": 0.03068473711513536\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8311926605504587,\n \"acc_stderr\": 0.016060056268530343,\n \"acc_norm\": 0.8311926605504587,\n \"acc_norm_stderr\": 0.016060056268530343\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.47685185185185186,\n \"acc_stderr\": 0.03406315360711507,\n \"acc_norm\": 0.47685185185185186,\n \"acc_norm_stderr\": 0.03406315360711507\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7990196078431373,\n \"acc_stderr\": 0.028125972265654373,\n \"acc_norm\": 0.7990196078431373,\n \"acc_norm_stderr\": 0.028125972265654373\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8185654008438819,\n \"acc_stderr\": 0.02508596114457966,\n \"acc_norm\": 0.8185654008438819,\n \"acc_norm_stderr\": 0.02508596114457966\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6995515695067265,\n \"acc_stderr\": 0.030769352008229146,\n \"acc_norm\": 0.6995515695067265,\n \"acc_norm_stderr\": 0.030769352008229146\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7633587786259542,\n \"acc_stderr\": 0.03727673575596914,\n \"acc_norm\": 0.7633587786259542,\n \"acc_norm_stderr\": 0.03727673575596914\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8016528925619835,\n \"acc_stderr\": 0.03640118271990946,\n \"acc_norm\": 0.8016528925619835,\n \"acc_norm_stderr\": 0.03640118271990946\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.0401910747255735,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7914110429447853,\n \"acc_stderr\": 0.031921934489347235,\n \"acc_norm\": 0.7914110429447853,\n \"acc_norm_stderr\": 0.031921934489347235\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5178571428571429,\n \"acc_stderr\": 0.047427623612430116,\n \"acc_norm\": 0.5178571428571429,\n \"acc_norm_stderr\": 0.047427623612430116\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7766990291262136,\n \"acc_stderr\": 0.04123553189891431,\n \"acc_norm\": 0.7766990291262136,\n \"acc_norm_stderr\": 0.04123553189891431\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8632478632478633,\n \"acc_stderr\": 0.022509033937077802,\n \"acc_norm\": 0.8632478632478633,\n \"acc_norm_stderr\": 0.022509033937077802\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8173690932311622,\n \"acc_stderr\": 0.013816335389973136,\n \"acc_norm\": 0.8173690932311622,\n \"acc_norm_stderr\": 0.013816335389973136\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7398843930635838,\n \"acc_stderr\": 0.023618678310069356,\n \"acc_norm\": 0.7398843930635838,\n \"acc_norm_stderr\": 0.023618678310069356\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.3564245810055866,\n \"acc_stderr\": 0.016018239710513405,\n \"acc_norm\": 0.3564245810055866,\n \"acc_norm_stderr\": 0.016018239710513405\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7418300653594772,\n \"acc_stderr\": 0.025058503316958147,\n \"acc_norm\": 0.7418300653594772,\n \"acc_norm_stderr\": 0.025058503316958147\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7376543209876543,\n \"acc_stderr\": 0.024477222856135114,\n \"acc_norm\": 0.7376543209876543,\n \"acc_norm_stderr\": 0.024477222856135114\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.45390070921985815,\n \"acc_stderr\": 0.029700453247291474,\n \"acc_norm\": 0.45390070921985815,\n \"acc_norm_stderr\": 0.029700453247291474\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4706649282920469,\n \"acc_stderr\": 0.01274823839736555,\n \"acc_norm\": 0.4706649282920469,\n \"acc_norm_stderr\": 0.01274823839736555\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6617647058823529,\n \"acc_stderr\": 0.028739328513983572,\n \"acc_norm\": 0.6617647058823529,\n \"acc_norm_stderr\": 0.028739328513983572\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6797385620915033,\n \"acc_stderr\": 0.018875682938069443,\n \"acc_norm\": 0.6797385620915033,\n \"acc_norm_stderr\": 0.018875682938069443\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6363636363636364,\n \"acc_stderr\": 0.046075820907199756,\n \"acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.046075820907199756\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.028263889943784586,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.028263889943784586\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616913,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616913\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.87,\n \"acc_stderr\": 0.033799766898963086,\n \"acc_norm\": 0.87,\n \"acc_norm_stderr\": 0.033799766898963086\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5421686746987951,\n \"acc_stderr\": 0.0387862677100236,\n \"acc_norm\": 0.5421686746987951,\n \"acc_norm_stderr\": 0.0387862677100236\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8187134502923976,\n \"acc_stderr\": 0.029547741687640044,\n \"acc_norm\": 0.8187134502923976,\n \"acc_norm_stderr\": 0.029547741687640044\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.37576499388004897,\n \"mc1_stderr\": 0.016954584060214297,\n \"mc2\": 0.5445170161036526,\n \"mc2_stderr\": 0.015492435025282279\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7892659826361483,\n \"acc_stderr\": 0.011462046419710686\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.601213040181956,\n \"acc_stderr\": 
0.013487360477060832\n }\n}\n```", "repo_url": "https://huggingface.co/Walmart-the-bag/WordWoven-13B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-04-01.998645.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-04-01.998645.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-04-01.998645.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-04-01.998645.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-04-01.998645.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T14_04_01.998645", "path": ["**/details_harness|winogrande|5_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T14-04-01.998645.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T14_04_01.998645", "path": ["results_2024-01-04T14-04-01.998645.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T14-04-01.998645.parquet"]}]}]}
2024-01-04T14:06:45+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of Walmart-the-bag/WordWoven-13B Dataset automatically created during the evaluation run of model Walmart-the-bag/WordWoven-13B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T14:04:01.998645 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
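To make the "do the following" step above concrete, the snippet below mirrors the load example given in the dataset summary; the `harness_winogrande_5` configuration and the `train` split are only illustrative, and any of the 63 configurations listed in the metadata can be loaded the same way:

```python
from datasets import load_dataset

# Per-sample details for one evaluated task; the "train" split always
# points at the latest results for this configuration.
data = load_dataset(
    "open-llm-leaderboard/details_Walmart-the-bag__WordWoven-13B",
    "harness_winogrande_5",
    split="train",
)
print(data)
```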
[ "# Dataset Card for Evaluation run of Walmart-the-bag/WordWoven-13B\n\n\n\nDataset automatically created during the evaluation run of model Walmart-the-bag/WordWoven-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:04:01.998645(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of Walmart-the-bag/WordWoven-13B\n\n\n\nDataset automatically created during the evaluation run of model Walmart-the-bag/WordWoven-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:04:01.998645(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 185, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of Walmart-the-bag/WordWoven-13B\n\n\n\nDataset automatically created during the evaluation run of model Walmart-the-bag/WordWoven-13B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T14:04:01.998645(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
37b6f4eee0bd2bb3dec0a0cc4a50139e301a4b45
# Dataset Card for Evaluation run of gagan3012/MetaModel <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [gagan3012/MetaModel](https://huggingface.co/gagan3012/MetaModel) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_gagan3012__MetaModel", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T14:09:43.780941](https://huggingface.co/datasets/open-llm-leaderboard/details_gagan3012__MetaModel/blob/main/results_2024-01-04T14-09-43.780941.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6664380298886512, "acc_stderr": 0.031642195230944255, "acc_norm": 0.6671639222858992, "acc_norm_stderr": 0.03228745343467652, "mc1": 0.5691554467564259, "mc1_stderr": 0.01733527247533237, "mc2": 0.7184177934834866, "mc2_stderr": 0.014995634120330182 }, "harness|arc:challenge|25": { "acc": 0.6843003412969283, "acc_stderr": 0.013582571095815291, "acc_norm": 0.7107508532423208, "acc_norm_stderr": 0.01325001257939344 }, "harness|hellaswag|10": { "acc": 0.7132045409281019, "acc_stderr": 0.004513409114983828, "acc_norm": 0.8844851623182632, "acc_norm_stderr": 0.0031898897894046684 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.43, "acc_stderr": 0.049756985195624284, "acc_norm": 0.43, "acc_norm_stderr": 0.049756985195624284 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.743421052631579, "acc_stderr": 0.0355418036802569, "acc_norm": 0.743421052631579, "acc_norm_stderr": 0.0355418036802569 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.02863723563980089, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.02863723563980089 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.48, "acc_stderr": 0.05021167315686781, "acc_norm": 0.48, "acc_norm_stderr": 0.05021167315686781 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6647398843930635, "acc_stderr": 0.03599586301247077, "acc_norm": 0.6647398843930635, "acc_norm_stderr": 0.03599586301247077 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107223, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107223 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.75, "acc_stderr": 0.04351941398892446, "acc_norm": 0.75, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.625531914893617, "acc_stderr": 0.03163910665367291, "acc_norm": 0.625531914893617, "acc_norm_stderr": 0.03163910665367291 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4824561403508772, "acc_stderr": 0.04700708033551038, "acc_norm": 0.4824561403508772, "acc_norm_stderr": 0.04700708033551038 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6413793103448275, "acc_stderr": 0.039966295748767186, "acc_norm": 0.6413793103448275, "acc_norm_stderr": 0.039966295748767186 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.5, "acc_stderr": 0.025751310131230234, "acc_norm": 0.5, "acc_norm_stderr": 0.025751310131230234 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.42857142857142855, "acc_stderr": 0.0442626668137991, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.0442626668137991 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8129032258064516, "acc_stderr": 0.022185710092252252, "acc_norm": 0.8129032258064516, "acc_norm_stderr": 0.022185710092252252 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5073891625615764, "acc_stderr": 0.035176035403610105, "acc_norm": 0.5073891625615764, "acc_norm_stderr": 0.035176035403610105 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8121212121212121, "acc_stderr": 0.03050193405942914, "acc_norm": 0.8121212121212121, "acc_norm_stderr": 0.03050193405942914 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8636363636363636, "acc_stderr": 0.024450155973189835, "acc_norm": 0.8636363636363636, "acc_norm_stderr": 0.024450155973189835 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.021995311963644244, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.021995311963644244 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6692307692307692, "acc_stderr": 0.02385479568097114, "acc_norm": 0.6692307692307692, "acc_norm_stderr": 0.02385479568097114 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.37037037037037035, "acc_stderr": 0.02944316932303154, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.02944316932303154 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7142857142857143, "acc_stderr": 0.029344572500634332, "acc_norm": 0.7142857142857143, "acc_norm_stderr": 0.029344572500634332 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3708609271523179, "acc_stderr": 0.03943966699183629, "acc_norm": 
0.3708609271523179, "acc_norm_stderr": 0.03943966699183629 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8422018348623853, "acc_stderr": 0.01563002297009246, "acc_norm": 0.8422018348623853, "acc_norm_stderr": 0.01563002297009246 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5740740740740741, "acc_stderr": 0.03372343271653062, "acc_norm": 0.5740740740740741, "acc_norm_stderr": 0.03372343271653062 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8578431372549019, "acc_stderr": 0.02450980392156862, "acc_norm": 0.8578431372549019, "acc_norm_stderr": 0.02450980392156862 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8565400843881856, "acc_stderr": 0.022818291821017012, "acc_norm": 0.8565400843881856, "acc_norm_stderr": 0.022818291821017012 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.672645739910314, "acc_stderr": 0.03149384670994131, "acc_norm": 0.672645739910314, "acc_norm_stderr": 0.03149384670994131 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7557251908396947, "acc_stderr": 0.03768335959728743, "acc_norm": 0.7557251908396947, "acc_norm_stderr": 0.03768335959728743 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.037494924487096966, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.037494924487096966 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.754601226993865, "acc_stderr": 0.03380939813943354, "acc_norm": 0.754601226993865, "acc_norm_stderr": 0.03380939813943354 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.8446601941747572, "acc_stderr": 0.035865947385739734, "acc_norm": 0.8446601941747572, "acc_norm_stderr": 0.035865947385739734 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8589743589743589, "acc_stderr": 0.02280138253459753, "acc_norm": 0.8589743589743589, "acc_norm_stderr": 0.02280138253459753 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.7, "acc_stderr": 0.046056618647183814, "acc_norm": 0.7, "acc_norm_stderr": 0.046056618647183814 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8084291187739464, "acc_stderr": 0.014072859310451949, "acc_norm": 0.8084291187739464, "acc_norm_stderr": 0.014072859310451949 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7572254335260116, "acc_stderr": 0.023083658586984204, "acc_norm": 0.7572254335260116, "acc_norm_stderr": 0.023083658586984204 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.39664804469273746, "acc_stderr": 0.016361354769822468, "acc_norm": 0.39664804469273746, "acc_norm_stderr": 0.016361354769822468 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7581699346405228, "acc_stderr": 0.024518195641879334, "acc_norm": 0.7581699346405228, "acc_norm_stderr": 0.024518195641879334 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7202572347266881, "acc_stderr": 0.025494259350694905, "acc_norm": 0.7202572347266881, "acc_norm_stderr": 0.025494259350694905 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7777777777777778, "acc_stderr": 0.02313237623454333, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.02313237623454333 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.5035460992907801, "acc_stderr": 0.02982674915328092, "acc_norm": 0.5035460992907801, "acc_norm_stderr": 0.02982674915328092 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.49478487614080835, "acc_stderr": 0.012769541449652547, "acc_norm": 0.49478487614080835, "acc_norm_stderr": 0.012769541449652547 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.75, "acc_stderr": 0.026303648393696036, "acc_norm": 0.75, "acc_norm_stderr": 0.026303648393696036 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6813725490196079, "acc_stderr": 0.018850084696468712, "acc_norm": 0.6813725490196079, "acc_norm_stderr": 0.018850084696468712 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910509, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910509 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.746938775510204, "acc_stderr": 0.027833023871399677, "acc_norm": 0.746938775510204, "acc_norm_stderr": 0.027833023871399677 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8258706467661692, "acc_stderr": 0.026814951200421603, "acc_norm": 0.8258706467661692, "acc_norm_stderr": 0.026814951200421603 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.91, "acc_stderr": 0.028762349126466125, "acc_norm": 0.91, "acc_norm_stderr": 0.028762349126466125 }, "harness|hendrycksTest-virology|5": { "acc": 0.5783132530120482, "acc_stderr": 0.038444531817709175, "acc_norm": 0.5783132530120482, "acc_norm_stderr": 0.038444531817709175 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03188578017686398, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03188578017686398 }, "harness|truthfulqa:mc|0": { "mc1": 0.5691554467564259, "mc1_stderr": 0.01733527247533237, "mc2": 0.7184177934834866, "mc2_stderr": 0.014995634120330182 }, "harness|winogrande|5": { "acc": 0.8342541436464088, "acc_stderr": 0.010450899545370632 }, "harness|gsm8k|5": { "acc": 0.6535253980288097, "acc_stderr": 0.013107179054313398 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
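For the aggregated numbers of this run, rather than the per-task details, the repository also exposes a "results" configuration with a "latest" split (both listed in the metadata further down). A minimal sketch of loading it with the `datasets` library, assuming those config and split names:

```python
from datasets import load_dataset

# "results" aggregates the whole run; the "latest" split tracks the most recent evaluation.
results = load_dataset(
    "open-llm-leaderboard/details_gagan3012__MetaModel",
    "results",
    split="latest",
)

print(results[0])  # inspect the first aggregated-results record
```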
open-llm-leaderboard/details_gagan3012__MetaModel
[ "region:us" ]
2024-01-04T14:11:58+00:00
{"pretty_name": "Evaluation run of gagan3012/MetaModel", "dataset_summary": "Dataset automatically created during the evaluation run of model [gagan3012/MetaModel](https://huggingface.co/gagan3012/MetaModel) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_gagan3012__MetaModel\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T14:09:43.780941](https://huggingface.co/datasets/open-llm-leaderboard/details_gagan3012__MetaModel/blob/main/results_2024-01-04T14-09-43.780941.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6664380298886512,\n \"acc_stderr\": 0.031642195230944255,\n \"acc_norm\": 0.6671639222858992,\n \"acc_norm_stderr\": 0.03228745343467652,\n \"mc1\": 0.5691554467564259,\n \"mc1_stderr\": 0.01733527247533237,\n \"mc2\": 0.7184177934834866,\n \"mc2_stderr\": 0.014995634120330182\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6843003412969283,\n \"acc_stderr\": 0.013582571095815291,\n \"acc_norm\": 0.7107508532423208,\n \"acc_norm_stderr\": 0.01325001257939344\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7132045409281019,\n \"acc_stderr\": 0.004513409114983828,\n \"acc_norm\": 0.8844851623182632,\n \"acc_norm_stderr\": 0.0031898897894046684\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.43,\n \"acc_stderr\": 0.049756985195624284,\n \"acc_norm\": 0.43,\n \"acc_norm_stderr\": 0.049756985195624284\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.743421052631579,\n \"acc_stderr\": 0.0355418036802569,\n \"acc_norm\": 0.743421052631579,\n \"acc_norm_stderr\": 0.0355418036802569\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.02863723563980089,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.02863723563980089\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n 
\"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.05021167315686781,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.05021167315686781\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6647398843930635,\n \"acc_stderr\": 0.03599586301247077,\n \"acc_norm\": 0.6647398843930635,\n \"acc_norm_stderr\": 0.03599586301247077\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107223,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107223\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.625531914893617,\n \"acc_stderr\": 0.03163910665367291,\n \"acc_norm\": 0.625531914893617,\n \"acc_norm_stderr\": 0.03163910665367291\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4824561403508772,\n \"acc_stderr\": 0.04700708033551038,\n \"acc_norm\": 0.4824561403508772,\n \"acc_norm_stderr\": 0.04700708033551038\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6413793103448275,\n \"acc_stderr\": 0.039966295748767186,\n \"acc_norm\": 0.6413793103448275,\n \"acc_norm_stderr\": 0.039966295748767186\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.025751310131230234,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.025751310131230234\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.0442626668137991,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.0442626668137991\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8129032258064516,\n \"acc_stderr\": 0.022185710092252252,\n \"acc_norm\": 0.8129032258064516,\n \"acc_norm_stderr\": 0.022185710092252252\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5073891625615764,\n \"acc_stderr\": 0.035176035403610105,\n \"acc_norm\": 0.5073891625615764,\n \"acc_norm_stderr\": 0.035176035403610105\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8121212121212121,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.8121212121212121,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8636363636363636,\n \"acc_stderr\": 0.024450155973189835,\n \"acc_norm\": 0.8636363636363636,\n \"acc_norm_stderr\": 0.024450155973189835\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.021995311963644244,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.021995311963644244\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6692307692307692,\n \"acc_stderr\": 
0.02385479568097114,\n \"acc_norm\": 0.6692307692307692,\n \"acc_norm_stderr\": 0.02385479568097114\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.37037037037037035,\n \"acc_stderr\": 0.02944316932303154,\n \"acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.02944316932303154\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7142857142857143,\n \"acc_stderr\": 0.029344572500634332,\n \"acc_norm\": 0.7142857142857143,\n \"acc_norm_stderr\": 0.029344572500634332\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3708609271523179,\n \"acc_stderr\": 0.03943966699183629,\n \"acc_norm\": 0.3708609271523179,\n \"acc_norm_stderr\": 0.03943966699183629\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8422018348623853,\n \"acc_stderr\": 0.01563002297009246,\n \"acc_norm\": 0.8422018348623853,\n \"acc_norm_stderr\": 0.01563002297009246\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5740740740740741,\n \"acc_stderr\": 0.03372343271653062,\n \"acc_norm\": 0.5740740740740741,\n \"acc_norm_stderr\": 0.03372343271653062\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8578431372549019,\n \"acc_stderr\": 0.02450980392156862,\n \"acc_norm\": 0.8578431372549019,\n \"acc_norm_stderr\": 0.02450980392156862\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8565400843881856,\n \"acc_stderr\": 0.022818291821017012,\n \"acc_norm\": 0.8565400843881856,\n \"acc_norm_stderr\": 0.022818291821017012\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.672645739910314,\n \"acc_stderr\": 0.03149384670994131,\n \"acc_norm\": 0.672645739910314,\n \"acc_norm_stderr\": 0.03149384670994131\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7557251908396947,\n \"acc_stderr\": 0.03768335959728743,\n \"acc_norm\": 0.7557251908396947,\n \"acc_norm_stderr\": 0.03768335959728743\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.037494924487096966,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.037494924487096966\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.754601226993865,\n \"acc_stderr\": 0.03380939813943354,\n \"acc_norm\": 0.754601226993865,\n \"acc_norm_stderr\": 0.03380939813943354\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8446601941747572,\n \"acc_stderr\": 0.035865947385739734,\n \"acc_norm\": 0.8446601941747572,\n \"acc_norm_stderr\": 0.035865947385739734\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8589743589743589,\n \"acc_stderr\": 0.02280138253459753,\n \"acc_norm\": 0.8589743589743589,\n \"acc_norm_stderr\": 0.02280138253459753\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.7,\n \"acc_stderr\": 0.046056618647183814,\n \"acc_norm\": 0.7,\n \"acc_norm_stderr\": 0.046056618647183814\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8084291187739464,\n \"acc_stderr\": 0.014072859310451949,\n \"acc_norm\": 0.8084291187739464,\n \"acc_norm_stderr\": 
0.014072859310451949\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7572254335260116,\n \"acc_stderr\": 0.023083658586984204,\n \"acc_norm\": 0.7572254335260116,\n \"acc_norm_stderr\": 0.023083658586984204\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.39664804469273746,\n \"acc_stderr\": 0.016361354769822468,\n \"acc_norm\": 0.39664804469273746,\n \"acc_norm_stderr\": 0.016361354769822468\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7581699346405228,\n \"acc_stderr\": 0.024518195641879334,\n \"acc_norm\": 0.7581699346405228,\n \"acc_norm_stderr\": 0.024518195641879334\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7202572347266881,\n \"acc_stderr\": 0.025494259350694905,\n \"acc_norm\": 0.7202572347266881,\n \"acc_norm_stderr\": 0.025494259350694905\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.02313237623454333,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.02313237623454333\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.5035460992907801,\n \"acc_stderr\": 0.02982674915328092,\n \"acc_norm\": 0.5035460992907801,\n \"acc_norm_stderr\": 0.02982674915328092\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.49478487614080835,\n \"acc_stderr\": 0.012769541449652547,\n \"acc_norm\": 0.49478487614080835,\n \"acc_norm_stderr\": 0.012769541449652547\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.026303648393696036,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.026303648393696036\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6813725490196079,\n \"acc_stderr\": 0.018850084696468712,\n \"acc_norm\": 0.6813725490196079,\n \"acc_norm_stderr\": 0.018850084696468712\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910509,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910509\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.746938775510204,\n \"acc_stderr\": 0.027833023871399677,\n \"acc_norm\": 0.746938775510204,\n \"acc_norm_stderr\": 0.027833023871399677\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8258706467661692,\n \"acc_stderr\": 0.026814951200421603,\n \"acc_norm\": 0.8258706467661692,\n \"acc_norm_stderr\": 0.026814951200421603\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.91,\n \"acc_stderr\": 0.028762349126466125,\n \"acc_norm\": 0.91,\n \"acc_norm_stderr\": 0.028762349126466125\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5783132530120482,\n \"acc_stderr\": 0.038444531817709175,\n \"acc_norm\": 0.5783132530120482,\n \"acc_norm_stderr\": 0.038444531817709175\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03188578017686398,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03188578017686398\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5691554467564259,\n \"mc1_stderr\": 0.01733527247533237,\n \"mc2\": 0.7184177934834866,\n \"mc2_stderr\": 0.014995634120330182\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8342541436464088,\n \"acc_stderr\": 0.010450899545370632\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6535253980288097,\n \"acc_stderr\": 0.013107179054313398\n }\n}\n```", "repo_url": "https://huggingface.co/gagan3012/MetaModel", "leaderboard_url": 
"https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-09-43.780941.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-09-43.780941.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-09-43.780941.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-09-43.780941.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-09-43.780941.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-09-43.780941.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["**/details_harness|winogrande|5_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T14-09-43.780941.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T14_09_43.780941", "path": ["results_2024-01-04T14-09-43.780941.parquet"]}, {"split": "latest", "path": 
["results_2024-01-04T14-09-43.780941.parquet"]}]}]}
2024-01-04T14:12:22+00:00
[]
[]
TAGS
#region-us
# Dataset Card for Evaluation run of gagan3012/MetaModel

Dataset automatically created during the evaluation run of model gagan3012/MetaModel on the Open LLM Leaderboard.

The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks.

The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results.

An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).

To load the details from a run, you can for instance do the following (the snippet is reproduced just after this card text):

## Latest results

These are the latest results from run 2024-01-04T14:09:43.780941 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval):

## Dataset Details

### Dataset Description

- Curated by:
- Funded by [optional]:
- Shared by [optional]:
- Language(s) (NLP):
- License:

### Dataset Sources [optional]

- Repository:
- Paper [optional]:
- Demo [optional]:

## Uses

### Direct Use

### Out-of-Scope Use

## Dataset Structure

## Dataset Creation

### Curation Rationale

### Source Data

#### Data Collection and Processing

#### Who are the source data producers?

### Annotations [optional]

#### Annotation process

#### Who are the annotators?

#### Personal and Sensitive Information

## Bias, Risks, and Limitations

### Recommendations

Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.

[optional]

BibTeX:

APA:

## Glossary [optional]

## More Information [optional]

## Dataset Card Authors [optional]

## Dataset Card Contact
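The loading snippet referenced above (as given in this dataset's metadata) is:

```python
from datasets import load_dataset

data = load_dataset("open-llm-leaderboard/details_gagan3012__MetaModel",
                    "harness_winogrande_5",
                    split="train")
```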
[ "# Dataset Card for Evaluation run of gagan3012/MetaModel\n\n\n\nDataset automatically created during the evaluation run of model gagan3012/MetaModel on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:09:43.780941(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of gagan3012/MetaModel\n\n\n\nDataset automatically created during the evaluation run of model gagan3012/MetaModel on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:09:43.780941(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 175, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of gagan3012/MetaModel\n\n\n\nDataset automatically created during the evaluation run of model gagan3012/MetaModel on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T14:09:43.780941(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
67463329fb6f780eb5e03ea9e556a7e47c1d92d0
# Dataset Card for Evaluation run of castorini/rank_vicuna_7b_v1_fp16 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [castorini/rank_vicuna_7b_v1_fp16](https://huggingface.co/castorini/rank_vicuna_7b_v1_fp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_castorini__rank_vicuna_7b_v1_fp16", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T14:11:17.677021](https://huggingface.co/datasets/open-llm-leaderboard/details_castorini__rank_vicuna_7b_v1_fp16/blob/main/results_2024-01-04T14-11-17.677021.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval): ```python { "all": { "acc": 0.43807693081233745, "acc_stderr": 0.034327867059302436, "acc_norm": 0.4451290963260708, "acc_norm_stderr": 0.03526514680417224, "mc1": 0.29865361077111385, "mc1_stderr": 0.016021570613768542, "mc2": 0.4512725152724823, "mc2_stderr": 0.015672269561043818 }, "harness|arc:challenge|25": { "acc": 0.4112627986348123, "acc_stderr": 0.014379441068522077, "acc_norm": 0.4462457337883959, "acc_norm_stderr": 0.014526705548539982 }, "harness|hellaswag|10": { "acc": 0.4856602270464051, "acc_stderr": 0.0049877289008975955, "acc_norm": 0.6567416849233221, "acc_norm_stderr": 0.004738264944737159 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.047937248544110196, "acc_norm": 0.35, "acc_norm_stderr": 0.047937248544110196 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.45185185185185184, "acc_stderr": 0.04299268905480864, "acc_norm": 0.45185185185185184, "acc_norm_stderr": 0.04299268905480864 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4276315789473684, "acc_stderr": 0.04026097083296558, "acc_norm": 0.4276315789473684, "acc_norm_stderr": 0.04026097083296558 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5018867924528302, "acc_stderr": 0.030772653642075664, "acc_norm": 0.5018867924528302, "acc_norm_stderr": 0.030772653642075664 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4444444444444444, "acc_stderr": 0.041553199555931467, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.041553199555931467 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.34, "acc_stderr": 0.047609522856952365, "acc_norm": 0.34, "acc_norm_stderr": 0.047609522856952365 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4,
"acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.0446196043338474, "acc_norm": 0.27, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.34104046242774566, "acc_stderr": 0.036146654241808254, "acc_norm": 0.34104046242774566, "acc_norm_stderr": 0.036146654241808254 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.21568627450980393, "acc_stderr": 0.04092563958237656, "acc_norm": 0.21568627450980393, "acc_norm_stderr": 0.04092563958237656 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.3617021276595745, "acc_stderr": 0.03141082197596241, "acc_norm": 0.3617021276595745, "acc_norm_stderr": 0.03141082197596241 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.041424397194893624, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.041424397194893624 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.46206896551724136, "acc_stderr": 0.04154659671707546, "acc_norm": 0.46206896551724136, "acc_norm_stderr": 0.04154659671707546 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2962962962962963, "acc_stderr": 0.02351729433596328, "acc_norm": 0.2962962962962963, "acc_norm_stderr": 0.02351729433596328 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.25396825396825395, "acc_stderr": 0.03893259610604674, "acc_norm": 0.25396825396825395, "acc_norm_stderr": 0.03893259610604674 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.49032258064516127, "acc_stderr": 0.02843867799890954, "acc_norm": 0.49032258064516127, "acc_norm_stderr": 0.02843867799890954 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3694581280788177, "acc_stderr": 0.03395970381998574, "acc_norm": 0.3694581280788177, "acc_norm_stderr": 0.03395970381998574 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.44, "acc_stderr": 0.04988876515698589, "acc_norm": 0.44, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.5212121212121212, "acc_stderr": 0.03900828913737301, "acc_norm": 0.5212121212121212, "acc_norm_stderr": 0.03900828913737301 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5757575757575758, "acc_stderr": 0.03521224908841585, "acc_norm": 0.5757575757575758, "acc_norm_stderr": 0.03521224908841585 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.5647668393782384, "acc_stderr": 0.035780381650085846, "acc_norm": 0.5647668393782384, "acc_norm_stderr": 0.035780381650085846 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4153846153846154, "acc_stderr": 0.024985354923102318, "acc_norm": 0.4153846153846154, "acc_norm_stderr": 0.024985354923102318 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.22592592592592592, "acc_stderr": 0.02549753263960955, "acc_norm": 0.22592592592592592, "acc_norm_stderr": 0.02549753263960955 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.41596638655462187, "acc_stderr": 0.03201650100739615, "acc_norm": 0.41596638655462187, "acc_norm_stderr": 0.03201650100739615 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.24503311258278146, "acc_stderr": 0.035118075718047245, "acc_norm": 0.24503311258278146, "acc_norm_stderr": 0.035118075718047245 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.5651376146788991, "acc_stderr": 0.021254631465609283, "acc_norm": 0.5651376146788991, "acc_norm_stderr": 0.021254631465609283 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.3611111111111111, "acc_stderr": 0.03275773486100999, "acc_norm": 0.3611111111111111, "acc_norm_stderr": 0.03275773486100999 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.5294117647058824, "acc_stderr": 0.035032352963679944, "acc_norm": 0.5294117647058824, "acc_norm_stderr": 0.035032352963679944 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.5274261603375527, "acc_stderr": 0.032498227183013026, "acc_norm": 0.5274261603375527, "acc_norm_stderr": 0.032498227183013026 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.515695067264574, "acc_stderr": 0.0335412657542081, "acc_norm": 0.515695067264574, "acc_norm_stderr": 0.0335412657542081 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.48854961832061067, "acc_stderr": 0.043841400240780176, "acc_norm": 0.48854961832061067, "acc_norm_stderr": 0.043841400240780176 }, "harness|hendrycksTest-international_law|5": { "acc": 0.5785123966942148, "acc_stderr": 0.045077322787750874, "acc_norm": 0.5785123966942148, "acc_norm_stderr": 0.045077322787750874 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5555555555555556, "acc_stderr": 0.04803752235190192, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.04803752235190192 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.4662576687116564, "acc_stderr": 0.03919415545048408, "acc_norm": 0.4662576687116564, "acc_norm_stderr": 0.03919415545048408 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.30357142857142855, "acc_stderr": 0.04364226155841044, "acc_norm": 0.30357142857142855, "acc_norm_stderr": 0.04364226155841044 }, "harness|hendrycksTest-management|5": { "acc": 0.5728155339805825, "acc_stderr": 0.04897957737781168, "acc_norm": 0.5728155339805825, "acc_norm_stderr": 0.04897957737781168 }, "harness|hendrycksTest-marketing|5": { "acc": 0.5683760683760684, "acc_stderr": 0.0324483553531149, "acc_norm": 0.5683760683760684, "acc_norm_stderr": 0.0324483553531149 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.5862068965517241, "acc_stderr": 0.01761220408466376, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.01761220408466376 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.4797687861271676, "acc_stderr": 0.026897049996382868, "acc_norm": 0.4797687861271676, "acc_norm_stderr": 0.026897049996382868 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.264804469273743, "acc_stderr": 0.014756906483260666, "acc_norm": 0.264804469273743, "acc_norm_stderr": 0.014756906483260666 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.4738562091503268, "acc_stderr": 0.028590752958852394, "acc_norm": 0.4738562091503268, "acc_norm_stderr": 0.028590752958852394 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.5048231511254019, "acc_stderr": 0.028396770444111298, "acc_norm": 0.5048231511254019, "acc_norm_stderr": 0.028396770444111298 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.5555555555555556, "acc_stderr": 0.027648477877413327, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 
0.027648477877413327 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.3191489361702128, "acc_stderr": 0.027807990141320186, "acc_norm": 0.3191489361702128, "acc_norm_stderr": 0.027807990141320186 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.3213820078226858, "acc_stderr": 0.011927581352265076, "acc_norm": 0.3213820078226858, "acc_norm_stderr": 0.011927581352265076 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.3897058823529412, "acc_stderr": 0.029624663581159696, "acc_norm": 0.3897058823529412, "acc_norm_stderr": 0.029624663581159696 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.4068627450980392, "acc_stderr": 0.01987380200506118, "acc_norm": 0.4068627450980392, "acc_norm_stderr": 0.01987380200506118 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.5, "acc_stderr": 0.04789131426105757, "acc_norm": 0.5, "acc_norm_stderr": 0.04789131426105757 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.5795918367346938, "acc_stderr": 0.03160106993449601, "acc_norm": 0.5795918367346938, "acc_norm_stderr": 0.03160106993449601 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6218905472636815, "acc_stderr": 0.034288678487786564, "acc_norm": 0.6218905472636815, "acc_norm_stderr": 0.034288678487786564 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.67, "acc_stderr": 0.04725815626252609, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252609 }, "harness|hendrycksTest-virology|5": { "acc": 0.42771084337349397, "acc_stderr": 0.038515976837185335, "acc_norm": 0.42771084337349397, "acc_norm_stderr": 0.038515976837185335 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.6023391812865497, "acc_stderr": 0.03753638955761691, "acc_norm": 0.6023391812865497, "acc_norm_stderr": 0.03753638955761691 }, "harness|truthfulqa:mc|0": { "mc1": 0.29865361077111385, "mc1_stderr": 0.016021570613768542, "mc2": 0.4512725152724823, "mc2_stderr": 0.015672269561043818 }, "harness|winogrande|5": { "acc": 0.6661404893449092, "acc_stderr": 0.013254029695143351 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
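As a supplementary note to the card above (not part of the generated card itself): the snippet below is a minimal sketch of reading the aggregated metrics from the "results" configuration described in the summary. It assumes the "results" config exposes a "latest" split in the same way the per-task configs do; the exact row layout is not spelled out in the card, so the printout is only illustrative.

```python
from datasets import load_dataset

# Aggregated run-level metrics are stored in the "results" configuration;
# the "latest" split points at the most recent run for this model
# (2024-01-04T14-11-17.677021 when this card was generated).
results = load_dataset(
    "open-llm-leaderboard/details_castorini__rank_vicuna_7b_v1_fp16",
    "results",
    split="latest",
)

# Inspect the first row; it should mirror the numbers shown in the
# "Latest results" section above.
print(results[0])
```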
open-llm-leaderboard/details_castorini__rank_vicuna_7b_v1_fp16
[ "region:us" ]
2024-01-04T14:13:41+00:00
{"pretty_name": "Evaluation run of castorini/rank_vicuna_7b_v1_fp16", "dataset_summary": "Dataset automatically created during the evaluation run of model [castorini/rank_vicuna_7b_v1_fp16](https://huggingface.co/castorini/rank_vicuna_7b_v1_fp16) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_castorini__rank_vicuna_7b_v1_fp16\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T14:11:17.677021](https://huggingface.co/datasets/open-llm-leaderboard/details_castorini__rank_vicuna_7b_v1_fp16/blob/main/results_2024-01-04T14-11-17.677021.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.43807693081233745,\n \"acc_stderr\": 0.034327867059302436,\n \"acc_norm\": 0.4451290963260708,\n \"acc_norm_stderr\": 0.03526514680417224,\n \"mc1\": 0.29865361077111385,\n \"mc1_stderr\": 0.016021570613768542,\n \"mc2\": 0.4512725152724823,\n \"mc2_stderr\": 0.015672269561043818\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.4112627986348123,\n \"acc_stderr\": 0.014379441068522077,\n \"acc_norm\": 0.4462457337883959,\n \"acc_norm_stderr\": 0.014526705548539982\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4856602270464051,\n \"acc_stderr\": 0.0049877289008975955,\n \"acc_norm\": 0.6567416849233221,\n \"acc_norm_stderr\": 0.004738264944737159\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.047937248544110196,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.047937248544110196\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.45185185185185184,\n \"acc_stderr\": 0.04299268905480864,\n \"acc_norm\": 0.45185185185185184,\n \"acc_norm_stderr\": 0.04299268905480864\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.4276315789473684,\n \"acc_stderr\": 0.04026097083296558,\n \"acc_norm\": 0.4276315789473684,\n \"acc_norm_stderr\": 0.04026097083296558\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5018867924528302,\n \"acc_stderr\": 0.030772653642075664,\n \"acc_norm\": 0.5018867924528302,\n \"acc_norm_stderr\": 0.030772653642075664\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.041553199555931467,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.041553199555931467\n },\n \"harness|hendrycksTest-college_chemistry|5\": 
{\n \"acc\": 0.34,\n \"acc_stderr\": 0.047609522856952365,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.047609522856952365\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.34104046242774566,\n \"acc_stderr\": 0.036146654241808254,\n \"acc_norm\": 0.34104046242774566,\n \"acc_norm_stderr\": 0.036146654241808254\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.21568627450980393,\n \"acc_stderr\": 0.04092563958237656,\n \"acc_norm\": 0.21568627450980393,\n \"acc_norm_stderr\": 0.04092563958237656\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.3617021276595745,\n \"acc_stderr\": 0.03141082197596241,\n \"acc_norm\": 0.3617021276595745,\n \"acc_norm_stderr\": 0.03141082197596241\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n \"acc_stderr\": 0.041424397194893624,\n \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.041424397194893624\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.46206896551724136,\n \"acc_stderr\": 0.04154659671707546,\n \"acc_norm\": 0.46206896551724136,\n \"acc_norm_stderr\": 0.04154659671707546\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2962962962962963,\n \"acc_stderr\": 0.02351729433596328,\n \"acc_norm\": 0.2962962962962963,\n \"acc_norm_stderr\": 0.02351729433596328\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.25396825396825395,\n \"acc_stderr\": 0.03893259610604674,\n \"acc_norm\": 0.25396825396825395,\n \"acc_norm_stderr\": 0.03893259610604674\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.49032258064516127,\n \"acc_stderr\": 0.02843867799890954,\n \"acc_norm\": 0.49032258064516127,\n \"acc_norm_stderr\": 0.02843867799890954\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3694581280788177,\n \"acc_stderr\": 0.03395970381998574,\n \"acc_norm\": 0.3694581280788177,\n \"acc_norm_stderr\": 0.03395970381998574\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.44,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.44,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.5212121212121212,\n \"acc_stderr\": 0.03900828913737301,\n \"acc_norm\": 0.5212121212121212,\n \"acc_norm_stderr\": 0.03900828913737301\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5757575757575758,\n \"acc_stderr\": 0.03521224908841585,\n \"acc_norm\": 0.5757575757575758,\n \"acc_norm_stderr\": 0.03521224908841585\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.5647668393782384,\n \"acc_stderr\": 0.035780381650085846,\n \"acc_norm\": 0.5647668393782384,\n \"acc_norm_stderr\": 0.035780381650085846\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.4153846153846154,\n \"acc_stderr\": 0.024985354923102318,\n \"acc_norm\": 0.4153846153846154,\n \"acc_norm_stderr\": 0.024985354923102318\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.22592592592592592,\n \"acc_stderr\": 0.02549753263960955,\n \"acc_norm\": 0.22592592592592592,\n \"acc_norm_stderr\": 0.02549753263960955\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.41596638655462187,\n \"acc_stderr\": 0.03201650100739615,\n \"acc_norm\": 0.41596638655462187,\n \"acc_norm_stderr\": 0.03201650100739615\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.24503311258278146,\n \"acc_stderr\": 0.035118075718047245,\n \"acc_norm\": 0.24503311258278146,\n \"acc_norm_stderr\": 0.035118075718047245\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.5651376146788991,\n \"acc_stderr\": 0.021254631465609283,\n \"acc_norm\": 0.5651376146788991,\n \"acc_norm_stderr\": 0.021254631465609283\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.3611111111111111,\n \"acc_stderr\": 0.03275773486100999,\n \"acc_norm\": 0.3611111111111111,\n \"acc_norm_stderr\": 0.03275773486100999\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.5294117647058824,\n \"acc_stderr\": 0.035032352963679944,\n \"acc_norm\": 0.5294117647058824,\n \"acc_norm_stderr\": 0.035032352963679944\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.5274261603375527,\n \"acc_stderr\": 0.032498227183013026,\n \"acc_norm\": 0.5274261603375527,\n \"acc_norm_stderr\": 0.032498227183013026\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.515695067264574,\n \"acc_stderr\": 0.0335412657542081,\n \"acc_norm\": 0.515695067264574,\n \"acc_norm_stderr\": 0.0335412657542081\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.48854961832061067,\n \"acc_stderr\": 0.043841400240780176,\n \"acc_norm\": 0.48854961832061067,\n \"acc_norm_stderr\": 0.043841400240780176\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.5785123966942148,\n \"acc_stderr\": 0.045077322787750874,\n \"acc_norm\": 0.5785123966942148,\n \"acc_norm_stderr\": 0.045077322787750874\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.04803752235190192,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.04803752235190192\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.4662576687116564,\n \"acc_stderr\": 0.03919415545048408,\n \"acc_norm\": 0.4662576687116564,\n \"acc_norm_stderr\": 0.03919415545048408\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.30357142857142855,\n \"acc_stderr\": 0.04364226155841044,\n \"acc_norm\": 0.30357142857142855,\n \"acc_norm_stderr\": 0.04364226155841044\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.5728155339805825,\n \"acc_stderr\": 0.04897957737781168,\n \"acc_norm\": 0.5728155339805825,\n \"acc_norm_stderr\": 0.04897957737781168\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.5683760683760684,\n \"acc_stderr\": 0.0324483553531149,\n \"acc_norm\": 0.5683760683760684,\n \"acc_norm_stderr\": 0.0324483553531149\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.5862068965517241,\n \"acc_stderr\": 0.01761220408466376,\n \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.01761220408466376\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.4797687861271676,\n \"acc_stderr\": 0.026897049996382868,\n \"acc_norm\": 0.4797687861271676,\n \"acc_norm_stderr\": 0.026897049996382868\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.264804469273743,\n \"acc_stderr\": 0.014756906483260666,\n \"acc_norm\": 0.264804469273743,\n \"acc_norm_stderr\": 0.014756906483260666\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.4738562091503268,\n \"acc_stderr\": 0.028590752958852394,\n \"acc_norm\": 0.4738562091503268,\n \"acc_norm_stderr\": 0.028590752958852394\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.5048231511254019,\n \"acc_stderr\": 0.028396770444111298,\n \"acc_norm\": 0.5048231511254019,\n \"acc_norm_stderr\": 0.028396770444111298\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.027648477877413327,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.027648477877413327\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.3191489361702128,\n \"acc_stderr\": 0.027807990141320186,\n \"acc_norm\": 0.3191489361702128,\n \"acc_norm_stderr\": 0.027807990141320186\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.3213820078226858,\n \"acc_stderr\": 0.011927581352265076,\n \"acc_norm\": 0.3213820078226858,\n \"acc_norm_stderr\": 0.011927581352265076\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.3897058823529412,\n \"acc_stderr\": 0.029624663581159696,\n \"acc_norm\": 0.3897058823529412,\n \"acc_norm_stderr\": 0.029624663581159696\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.4068627450980392,\n \"acc_stderr\": 0.01987380200506118,\n \"acc_norm\": 0.4068627450980392,\n \"acc_norm_stderr\": 0.01987380200506118\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.04789131426105757,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.04789131426105757\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.5795918367346938,\n \"acc_stderr\": 0.03160106993449601,\n \"acc_norm\": 0.5795918367346938,\n \"acc_norm_stderr\": 0.03160106993449601\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6218905472636815,\n \"acc_stderr\": 0.034288678487786564,\n \"acc_norm\": 0.6218905472636815,\n \"acc_norm_stderr\": 0.034288678487786564\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252609,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252609\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.42771084337349397,\n \"acc_stderr\": 0.038515976837185335,\n \"acc_norm\": 0.42771084337349397,\n \"acc_norm_stderr\": 0.038515976837185335\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.6023391812865497,\n \"acc_stderr\": 0.03753638955761691,\n \"acc_norm\": 0.6023391812865497,\n \"acc_norm_stderr\": 0.03753638955761691\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.29865361077111385,\n \"mc1_stderr\": 0.016021570613768542,\n \"mc2\": 0.4512725152724823,\n \"mc2_stderr\": 0.015672269561043818\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6661404893449092,\n \"acc_stderr\": 0.013254029695143351\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": 
"https://huggingface.co/castorini/rank_vicuna_7b_v1_fp16", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-11-17.677021.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-11-17.677021.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-11-17.677021.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-11-17.677021.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-11-17.677021.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T14_11_17.677021", "path": ["**/details_harness|winogrande|5_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T14-11-17.677021.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T14_11_17.677021", "path": ["results_2024-01-04T14-11-17.677021.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T14-11-17.677021.parquet"]}]}]}
2024-01-04T14:14:04+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of castorini/rank_vicuna_7b_v1_fp16 Dataset automatically created during the evaluation run of model castorini/rank_vicuna_7b_v1_fp16 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T14:11:17.677021 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each one in the results and in the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
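The flattened card above says "To load the details from a run, you can for instance do the following:" but the accompanying snippet was stripped in this copy. A minimal sketch of what it would look like, assuming the repo id follows the details_<org>__<model> naming convention used by the other cards in this dump:

```python
from datasets import load_dataset

# Load one task configuration of the evaluation details.
# The repo id below is inferred from the details_<org>__<model> convention
# seen elsewhere in this dump and is an assumption, not taken from this record.
data = load_dataset(
    "open-llm-leaderboard/details_castorini__rank_vicuna_7b_v1_fp16",
    "harness_winogrande_5",
    split="train",
)
```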
[ "# Dataset Card for Evaluation run of castorini/rank_vicuna_7b_v1_fp16\n\n\n\nDataset automatically created during the evaluation run of model castorini/rank_vicuna_7b_v1_fp16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:11:17.677021(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of castorini/rank_vicuna_7b_v1_fp16\n\n\n\nDataset automatically created during the evaluation run of model castorini/rank_vicuna_7b_v1_fp16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:11:17.677021(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 197, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of castorini/rank_vicuna_7b_v1_fp16\n\n\n\nDataset automatically created during the evaluation run of model castorini/rank_vicuna_7b_v1_fp16 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T14:11:17.677021(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
1e74ebba038d6636d2139e6eabef8a9ef4ec44b9
# Dataset Card for Evaluation run of CultriX/MistralTrix-v1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [CultriX/MistralTrix-v1](https://huggingface.co/CultriX/MistralTrix-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_CultriX__MistralTrix-v1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T14:12:42.125276](https://huggingface.co/datasets/open-llm-leaderboard/details_CultriX__MistralTrix-v1/blob/main/results_2024-01-04T14-12-42.125276.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6564998567910528, "acc_stderr": 0.031945350992768864, "acc_norm": 0.657490239404343, "acc_norm_stderr": 0.03258974493181662, "mc1": 0.5703794369645043, "mc1_stderr": 0.017329234580409095, "mc2": 0.7073380102170026, "mc2_stderr": 0.015057567462509228 }, "harness|arc:challenge|25": { "acc": 0.7056313993174061, "acc_stderr": 0.01331852846053942, "acc_norm": 0.7226962457337884, "acc_norm_stderr": 0.013082095839059374 }, "harness|hellaswag|10": { "acc": 0.7173869747062338, "acc_stderr": 0.004493495872000111, "acc_norm": 0.8832901812387971, "acc_norm_stderr": 0.0032041800729423835 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.32, "acc_stderr": 0.04688261722621504, "acc_norm": 0.32, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6444444444444445, "acc_stderr": 0.04135176749720385, "acc_norm": 0.6444444444444445, "acc_norm_stderr": 0.04135176749720385 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6644736842105263, "acc_stderr": 0.038424985593952694, "acc_norm": 0.6644736842105263, "acc_norm_stderr": 0.038424985593952694 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.61, "acc_stderr": 0.04902071300001975, "acc_norm": 0.61, "acc_norm_stderr": 0.04902071300001975 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7245283018867924, "acc_stderr": 0.027495663683724057, "acc_norm": 0.7245283018867924, "acc_norm_stderr": 0.027495663683724057 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7638888888888888, "acc_stderr": 0.03551446610810826, "acc_norm": 0.7638888888888888, "acc_norm_stderr": 0.03551446610810826 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.46, "acc_stderr": 0.05009082659620333, "acc_norm": 0.46, "acc_norm_stderr": 0.05009082659620333 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.57, "acc_stderr": 0.04975698519562428, "acc_norm": 0.57, "acc_norm_stderr": 0.04975698519562428 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.28, "acc_stderr": 0.04512608598542127, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6878612716763006, "acc_stderr": 0.03533133389323657, "acc_norm": 0.6878612716763006, "acc_norm_stderr": 0.03533133389323657 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.04913595201274498, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.04913595201274498 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.78, "acc_stderr": 0.041633319989322626, "acc_norm": 0.78, "acc_norm_stderr": 0.041633319989322626 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5914893617021276, "acc_stderr": 0.032134180267015755, "acc_norm": 0.5914893617021276, "acc_norm_stderr": 0.032134180267015755 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5586206896551724, "acc_stderr": 0.04137931034482757, "acc_norm": 0.5586206896551724, "acc_norm_stderr": 0.04137931034482757 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4312169312169312, "acc_stderr": 0.025506481698138215, "acc_norm": 0.4312169312169312, "acc_norm_stderr": 0.025506481698138215 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.48412698412698413, "acc_stderr": 0.04469881854072606, "acc_norm": 0.48412698412698413, "acc_norm_stderr": 0.04469881854072606 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7838709677419354, "acc_stderr": 0.023415293433568532, "acc_norm": 0.7838709677419354, "acc_norm_stderr": 0.023415293433568532 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5320197044334976, "acc_stderr": 0.035107665979592154, "acc_norm": 0.5320197044334976, "acc_norm_stderr": 0.035107665979592154 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.74, "acc_stderr": 0.04408440022768079, "acc_norm": 0.74, "acc_norm_stderr": 0.04408440022768079 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7636363636363637, "acc_stderr": 0.03317505930009181, "acc_norm": 0.7636363636363637, "acc_norm_stderr": 0.03317505930009181 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7727272727272727, "acc_stderr": 0.029857515673386414, "acc_norm": 0.7727272727272727, "acc_norm_stderr": 0.029857515673386414 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8963730569948186, "acc_stderr": 0.02199531196364424, "acc_norm": 0.8963730569948186, "acc_norm_stderr": 0.02199531196364424 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.676923076923077, "acc_stderr": 0.02371088850197057, "acc_norm": 0.676923076923077, "acc_norm_stderr": 0.02371088850197057 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.35555555555555557, "acc_stderr": 0.029185714949857416, "acc_norm": 0.35555555555555557, "acc_norm_stderr": 0.029185714949857416 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.680672268907563, "acc_stderr": 0.030283995525884396, "acc_norm": 0.680672268907563, "acc_norm_stderr": 0.030283995525884396 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.36423841059602646, "acc_stderr": 0.03929111781242742, "acc_norm": 
0.36423841059602646, "acc_norm_stderr": 0.03929111781242742 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8495412844036697, "acc_stderr": 0.015328563932669237, "acc_norm": 0.8495412844036697, "acc_norm_stderr": 0.015328563932669237 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5416666666666666, "acc_stderr": 0.03398110890294636, "acc_norm": 0.5416666666666666, "acc_norm_stderr": 0.03398110890294636 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8529411764705882, "acc_stderr": 0.024857478080250458, "acc_norm": 0.8529411764705882, "acc_norm_stderr": 0.024857478080250458 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.810126582278481, "acc_stderr": 0.025530100460233494, "acc_norm": 0.810126582278481, "acc_norm_stderr": 0.025530100460233494 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6995515695067265, "acc_stderr": 0.030769352008229143, "acc_norm": 0.6995515695067265, "acc_norm_stderr": 0.030769352008229143 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.8244274809160306, "acc_stderr": 0.03336820338476074, "acc_norm": 0.8244274809160306, "acc_norm_stderr": 0.03336820338476074 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7777777777777778, "acc_stderr": 0.0401910747255735, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.0401910747255735 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7852760736196319, "acc_stderr": 0.032262193772867744, "acc_norm": 0.7852760736196319, "acc_norm_stderr": 0.032262193772867744 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.42857142857142855, "acc_stderr": 0.04697113923010212, "acc_norm": 0.42857142857142855, "acc_norm_stderr": 0.04697113923010212 }, "harness|hendrycksTest-management|5": { "acc": 0.7864077669902912, "acc_stderr": 0.040580420156460344, "acc_norm": 0.7864077669902912, "acc_norm_stderr": 0.040580420156460344 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8803418803418803, "acc_stderr": 0.021262719400406957, "acc_norm": 0.8803418803418803, "acc_norm_stderr": 0.021262719400406957 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.71, "acc_stderr": 0.045604802157206845, "acc_norm": 0.71, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8275862068965517, "acc_stderr": 0.013507943909371803, "acc_norm": 0.8275862068965517, "acc_norm_stderr": 0.013507943909371803 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7225433526011561, "acc_stderr": 0.024105712607754307, "acc_norm": 0.7225433526011561, "acc_norm_stderr": 0.024105712607754307 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.46145251396648046, "acc_stderr": 0.01667273126755226, "acc_norm": 0.46145251396648046, "acc_norm_stderr": 0.01667273126755226 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7450980392156863, "acc_stderr": 0.02495418432487991, "acc_norm": 0.7450980392156863, "acc_norm_stderr": 0.02495418432487991 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7106109324758842, "acc_stderr": 0.025755865922632945, "acc_norm": 0.7106109324758842, "acc_norm_stderr": 0.025755865922632945 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7314814814814815, "acc_stderr": 0.024659685185967284, "acc_norm": 0.7314814814814815, "acc_norm_stderr": 0.024659685185967284 }, "harness|hendrycksTest-professional_accounting|5": { 
"acc": 0.48226950354609927, "acc_stderr": 0.02980873964223777, "acc_norm": 0.48226950354609927, "acc_norm_stderr": 0.02980873964223777 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.46870925684485004, "acc_stderr": 0.012745204626083136, "acc_norm": 0.46870925684485004, "acc_norm_stderr": 0.012745204626083136 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6948529411764706, "acc_stderr": 0.027971541370170595, "acc_norm": 0.6948529411764706, "acc_norm_stderr": 0.027971541370170595 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6617647058823529, "acc_stderr": 0.01913994374848704, "acc_norm": 0.6617647058823529, "acc_norm_stderr": 0.01913994374848704 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.02587064676616914, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.02587064676616914 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.03487350880197771, "acc_norm": 0.86, "acc_norm_stderr": 0.03487350880197771 }, "harness|hendrycksTest-virology|5": { "acc": 0.5602409638554217, "acc_stderr": 0.03864139923699121, "acc_norm": 0.5602409638554217, "acc_norm_stderr": 0.03864139923699121 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8304093567251462, "acc_stderr": 0.02878210810540171, "acc_norm": 0.8304093567251462, "acc_norm_stderr": 0.02878210810540171 }, "harness|truthfulqa:mc|0": { "mc1": 0.5703794369645043, "mc1_stderr": 0.017329234580409095, "mc2": 0.7073380102170026, "mc2_stderr": 0.015057567462509228 }, "harness|winogrande|5": { "acc": 0.8097868981846882, "acc_stderr": 0.011030335798617443 }, "harness|gsm8k|5": { "acc": 0.6277482941622441, "acc_stderr": 0.013315375362565038 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
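As the card notes, the aggregated metrics live in the "results" configuration and each configuration exposes a "latest" split alongside the timestamped ones. A minimal sketch for pulling the most recent aggregated results (the config and split names are taken from the card's own description and metadata; the rest is illustrative):

```python
from datasets import load_dataset

# "results" stores the aggregated metrics; "latest" always points to the most recent run.
results = load_dataset(
    "open-llm-leaderboard/details_CultriX__MistralTrix-v1",
    "results",
    split="latest",
)
print(results[0])  # inspect the aggregated accuracy / stderr fields for the run
```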
open-llm-leaderboard/details_CultriX__MistralTrix-v1
[ "region:us" ]
2024-01-04T14:15:04+00:00
{"pretty_name": "Evaluation run of CultriX/MistralTrix-v1", "dataset_summary": "Dataset automatically created during the evaluation run of model [CultriX/MistralTrix-v1](https://huggingface.co/CultriX/MistralTrix-v1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_CultriX__MistralTrix-v1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T14:12:42.125276](https://huggingface.co/datasets/open-llm-leaderboard/details_CultriX__MistralTrix-v1/blob/main/results_2024-01-04T14-12-42.125276.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6564998567910528,\n \"acc_stderr\": 0.031945350992768864,\n \"acc_norm\": 0.657490239404343,\n \"acc_norm_stderr\": 0.03258974493181662,\n \"mc1\": 0.5703794369645043,\n \"mc1_stderr\": 0.017329234580409095,\n \"mc2\": 0.7073380102170026,\n \"mc2_stderr\": 0.015057567462509228\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.7056313993174061,\n \"acc_stderr\": 0.01331852846053942,\n \"acc_norm\": 0.7226962457337884,\n \"acc_norm_stderr\": 0.013082095839059374\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7173869747062338,\n \"acc_stderr\": 0.004493495872000111,\n \"acc_norm\": 0.8832901812387971,\n \"acc_norm_stderr\": 0.0032041800729423835\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6444444444444445,\n \"acc_stderr\": 0.04135176749720385,\n \"acc_norm\": 0.6444444444444445,\n \"acc_norm_stderr\": 0.04135176749720385\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6644736842105263,\n \"acc_stderr\": 0.038424985593952694,\n \"acc_norm\": 0.6644736842105263,\n \"acc_norm_stderr\": 0.038424985593952694\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.61,\n \"acc_stderr\": 0.04902071300001975,\n \"acc_norm\": 0.61,\n \"acc_norm_stderr\": 0.04902071300001975\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7245283018867924,\n \"acc_stderr\": 0.027495663683724057,\n \"acc_norm\": 0.7245283018867924,\n \"acc_norm_stderr\": 0.027495663683724057\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7638888888888888,\n \"acc_stderr\": 0.03551446610810826,\n \"acc_norm\": 0.7638888888888888,\n \"acc_norm_stderr\": 0.03551446610810826\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.46,\n \"acc_stderr\": 0.05009082659620333,\n 
\"acc_norm\": 0.46,\n \"acc_norm_stderr\": 0.05009082659620333\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.57,\n \"acc_stderr\": 0.04975698519562428,\n \"acc_norm\": 0.57,\n \"acc_norm_stderr\": 0.04975698519562428\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.03533133389323657,\n \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.03533133389323657\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.04913595201274498,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.04913595201274498\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.78,\n \"acc_stderr\": 0.041633319989322626,\n \"acc_norm\": 0.78,\n \"acc_norm_stderr\": 0.041633319989322626\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5914893617021276,\n \"acc_stderr\": 0.032134180267015755,\n \"acc_norm\": 0.5914893617021276,\n \"acc_norm_stderr\": 0.032134180267015755\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5586206896551724,\n \"acc_stderr\": 0.04137931034482757,\n \"acc_norm\": 0.5586206896551724,\n \"acc_norm_stderr\": 0.04137931034482757\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4312169312169312,\n \"acc_stderr\": 0.025506481698138215,\n \"acc_norm\": 0.4312169312169312,\n \"acc_norm_stderr\": 0.025506481698138215\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.48412698412698413,\n \"acc_stderr\": 0.04469881854072606,\n \"acc_norm\": 0.48412698412698413,\n \"acc_norm_stderr\": 0.04469881854072606\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7838709677419354,\n \"acc_stderr\": 0.023415293433568532,\n \"acc_norm\": 0.7838709677419354,\n \"acc_norm_stderr\": 0.023415293433568532\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5320197044334976,\n \"acc_stderr\": 0.035107665979592154,\n \"acc_norm\": 0.5320197044334976,\n \"acc_norm_stderr\": 0.035107665979592154\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.04408440022768079,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.04408440022768079\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7636363636363637,\n \"acc_stderr\": 0.03317505930009181,\n \"acc_norm\": 0.7636363636363637,\n \"acc_norm_stderr\": 0.03317505930009181\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7727272727272727,\n \"acc_stderr\": 0.029857515673386414,\n \"acc_norm\": 0.7727272727272727,\n \"acc_norm_stderr\": 0.029857515673386414\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8963730569948186,\n \"acc_stderr\": 0.02199531196364424,\n \"acc_norm\": 0.8963730569948186,\n \"acc_norm_stderr\": 0.02199531196364424\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.676923076923077,\n \"acc_stderr\": 
0.02371088850197057,\n \"acc_norm\": 0.676923076923077,\n \"acc_norm_stderr\": 0.02371088850197057\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.35555555555555557,\n \"acc_stderr\": 0.029185714949857416,\n \"acc_norm\": 0.35555555555555557,\n \"acc_norm_stderr\": 0.029185714949857416\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.680672268907563,\n \"acc_stderr\": 0.030283995525884396,\n \"acc_norm\": 0.680672268907563,\n \"acc_norm_stderr\": 0.030283995525884396\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.36423841059602646,\n \"acc_stderr\": 0.03929111781242742,\n \"acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.03929111781242742\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8495412844036697,\n \"acc_stderr\": 0.015328563932669237,\n \"acc_norm\": 0.8495412844036697,\n \"acc_norm_stderr\": 0.015328563932669237\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5416666666666666,\n \"acc_stderr\": 0.03398110890294636,\n \"acc_norm\": 0.5416666666666666,\n \"acc_norm_stderr\": 0.03398110890294636\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8529411764705882,\n \"acc_stderr\": 0.024857478080250458,\n \"acc_norm\": 0.8529411764705882,\n \"acc_norm_stderr\": 0.024857478080250458\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.810126582278481,\n \"acc_stderr\": 0.025530100460233494,\n \"acc_norm\": 0.810126582278481,\n \"acc_norm_stderr\": 0.025530100460233494\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6995515695067265,\n \"acc_stderr\": 0.030769352008229143,\n \"acc_norm\": 0.6995515695067265,\n \"acc_norm_stderr\": 0.030769352008229143\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.8244274809160306,\n \"acc_stderr\": 0.03336820338476074,\n \"acc_norm\": 0.8244274809160306,\n \"acc_norm_stderr\": 0.03336820338476074\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.0401910747255735,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.0401910747255735\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7852760736196319,\n \"acc_stderr\": 0.032262193772867744,\n \"acc_norm\": 0.7852760736196319,\n \"acc_norm_stderr\": 0.032262193772867744\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.42857142857142855,\n \"acc_stderr\": 0.04697113923010212,\n \"acc_norm\": 0.42857142857142855,\n \"acc_norm_stderr\": 0.04697113923010212\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7864077669902912,\n \"acc_stderr\": 0.040580420156460344,\n \"acc_norm\": 0.7864077669902912,\n \"acc_norm_stderr\": 0.040580420156460344\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8803418803418803,\n \"acc_stderr\": 0.021262719400406957,\n \"acc_norm\": 0.8803418803418803,\n \"acc_norm_stderr\": 0.021262719400406957\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.71,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.71,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8275862068965517,\n \"acc_stderr\": 0.013507943909371803,\n \"acc_norm\": 0.8275862068965517,\n 
\"acc_norm_stderr\": 0.013507943909371803\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7225433526011561,\n \"acc_stderr\": 0.024105712607754307,\n \"acc_norm\": 0.7225433526011561,\n \"acc_norm_stderr\": 0.024105712607754307\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.46145251396648046,\n \"acc_stderr\": 0.01667273126755226,\n \"acc_norm\": 0.46145251396648046,\n \"acc_norm_stderr\": 0.01667273126755226\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7450980392156863,\n \"acc_stderr\": 0.02495418432487991,\n \"acc_norm\": 0.7450980392156863,\n \"acc_norm_stderr\": 0.02495418432487991\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7106109324758842,\n \"acc_stderr\": 0.025755865922632945,\n \"acc_norm\": 0.7106109324758842,\n \"acc_norm_stderr\": 0.025755865922632945\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7314814814814815,\n \"acc_stderr\": 0.024659685185967284,\n \"acc_norm\": 0.7314814814814815,\n \"acc_norm_stderr\": 0.024659685185967284\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48226950354609927,\n \"acc_stderr\": 0.02980873964223777,\n \"acc_norm\": 0.48226950354609927,\n \"acc_norm_stderr\": 0.02980873964223777\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.46870925684485004,\n \"acc_stderr\": 0.012745204626083136,\n \"acc_norm\": 0.46870925684485004,\n \"acc_norm_stderr\": 0.012745204626083136\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6948529411764706,\n \"acc_stderr\": 0.027971541370170595,\n \"acc_norm\": 0.6948529411764706,\n \"acc_norm_stderr\": 0.027971541370170595\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6617647058823529,\n \"acc_stderr\": 0.01913994374848704,\n \"acc_norm\": 0.6617647058823529,\n \"acc_norm_stderr\": 0.01913994374848704\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616914,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616914\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.03487350880197771,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.03487350880197771\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5602409638554217,\n \"acc_stderr\": 0.03864139923699121,\n \"acc_norm\": 0.5602409638554217,\n \"acc_norm_stderr\": 0.03864139923699121\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8304093567251462,\n \"acc_stderr\": 0.02878210810540171,\n \"acc_norm\": 0.8304093567251462,\n \"acc_norm_stderr\": 0.02878210810540171\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5703794369645043,\n \"mc1_stderr\": 0.017329234580409095,\n \"mc2\": 0.7073380102170026,\n \"mc2_stderr\": 0.015057567462509228\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8097868981846882,\n \"acc_stderr\": 0.011030335798617443\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6277482941622441,\n \"acc_stderr\": 0.013315375362565038\n }\n}\n```", "repo_url": "https://huggingface.co/CultriX/MistralTrix-v1", 
"leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-12-42.125276.parquet", 
"**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-12-42.125276.parquet", 
"**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-12-42.125276.parquet", 
"**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-12-42.125276.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-12-42.125276.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-12-42.125276.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["**/details_harness|winogrande|5_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T14-12-42.125276.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T14_12_42.125276", "path": ["results_2024-01-04T14-12-42.125276.parquet"]}, {"split": "latest", "path": 
["results_2024-01-04T14-12-42.125276.parquet"]}]}]}
2024-01-04T14:15:29+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of CultriX/MistralTrix-v1 Dataset automatically created during the evaluation run of model CultriX/MistralTrix-v1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T14:12:42.125276 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
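A minimal sketch of the load call referenced above (the original code block was stripped from this flattened text; the repo id below is an assumption based on the `details_<org>__<model>` naming pattern used by the other runs in this dump, and `harness_winogrande_5` is one of the listed configurations):

```python
from datasets import load_dataset

# Assumed repo id, following the details_<org>__<model> pattern seen elsewhere in this dump.
data = load_dataset(
    "open-llm-leaderboard/details_CultriX__MistralTrix-v1",
    "harness_winogrande_5",  # one of the 63 per-task configurations
    split="train",           # "train" always points to the latest results
)
print(data)
```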
[ "# Dataset Card for Evaluation run of CultriX/MistralTrix-v1\n\n\n\nDataset automatically created during the evaluation run of model CultriX/MistralTrix-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:12:42.125276(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of CultriX/MistralTrix-v1\n\n\n\nDataset automatically created during the evaluation run of model CultriX/MistralTrix-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:12:42.125276(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 183, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of CultriX/MistralTrix-v1\n\n\n\nDataset automatically created during the evaluation run of model CultriX/MistralTrix-v1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T14:12:42.125276(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
0f70ca4d8f75f8751b89897e2c4e1b02d2bb06ca
# Dataset Card for Evaluation run of xaviviro/FLAMA-0.5-3B <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [xaviviro/FLAMA-0.5-3B](https://huggingface.co/xaviviro/FLAMA-0.5-3B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_xaviviro__FLAMA-0.5-3B", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T14:14:38.357079](https://huggingface.co/datasets/open-llm-leaderboard/details_xaviviro__FLAMA-0.5-3B/blob/main/results_2024-01-04T14-14-38.357079.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.2647131817157082, "acc_stderr": 0.031070487329395803, "acc_norm": 0.2664900529641518, "acc_norm_stderr": 0.03184837841149458, "mc1": 0.26805385556915545, "mc1_stderr": 0.015506204722834557, "mc2": 0.4111139604848093, "mc2_stderr": 0.014893834532788594 }, "harness|arc:challenge|25": { "acc": 0.3532423208191126, "acc_stderr": 0.013967822714840055, "acc_norm": 0.3796928327645051, "acc_norm_stderr": 0.014182119866974874 }, "harness|hellaswag|10": { "acc": 0.4980083648675563, "acc_stderr": 0.004989741826250387, "acc_norm": 0.676458872734515, "acc_norm_stderr": 0.00466871068919241 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.2740740740740741, "acc_stderr": 0.03853254836552003, "acc_norm": 0.2740740740740741, "acc_norm_stderr": 0.03853254836552003 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.2565789473684211, "acc_stderr": 0.035541803680256896, "acc_norm": 0.2565789473684211, "acc_norm_stderr": 0.035541803680256896 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.22, "acc_stderr": 0.04163331998932269, "acc_norm": 0.22, "acc_norm_stderr": 0.04163331998932269 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2830188679245283, "acc_stderr": 0.027724236492700904, "acc_norm": 0.2830188679245283, "acc_norm_stderr": 0.027724236492700904 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2569444444444444, "acc_stderr": 0.03653946969442099, "acc_norm": 0.2569444444444444, "acc_norm_stderr": 0.03653946969442099 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.24, "acc_stderr": 0.04292346959909284, "acc_norm": 0.24, "acc_norm_stderr": 0.04292346959909284 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.26, "acc_stderr": 0.0440844002276808, "acc_norm": 0.26, "acc_norm_stderr": 0.0440844002276808 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.26, "acc_stderr": 0.044084400227680794, "acc_norm": 0.26, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.2254335260115607, "acc_stderr": 0.03186209851641144, "acc_norm": 0.2254335260115607, "acc_norm_stderr": 0.03186209851641144 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.042801058373643966, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.042801058373643966 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.32, "acc_stderr": 0.046882617226215034, "acc_norm": 0.32, "acc_norm_stderr": 0.046882617226215034 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.22127659574468084, "acc_stderr": 0.027136349602424056, "acc_norm": 0.22127659574468084, "acc_norm_stderr": 0.027136349602424056 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2543859649122807, "acc_stderr": 0.04096985139843673, "acc_norm": 0.2543859649122807, "acc_norm_stderr": 0.04096985139843673 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.2482758620689655, "acc_stderr": 0.03600105692727771, "acc_norm": 0.2482758620689655, "acc_norm_stderr": 0.03600105692727771 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2698412698412698, "acc_stderr": 0.02286083830923207, "acc_norm": 0.2698412698412698, "acc_norm_stderr": 0.02286083830923207 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.20634920634920634, "acc_stderr": 0.03619604524124251, "acc_norm": 0.20634920634920634, "acc_norm_stderr": 0.03619604524124251 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.23870967741935484, "acc_stderr": 0.02425107126220884, "acc_norm": 0.23870967741935484, "acc_norm_stderr": 0.02425107126220884 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2660098522167488, "acc_stderr": 0.03108982600293753, "acc_norm": 0.2660098522167488, "acc_norm_stderr": 0.03108982600293753 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2606060606060606, "acc_stderr": 0.034277431758165236, "acc_norm": 0.2606060606060606, "acc_norm_stderr": 0.034277431758165236 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.3434343434343434, "acc_stderr": 0.033832012232444426, "acc_norm": 0.3434343434343434, "acc_norm_stderr": 0.033832012232444426 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.21761658031088082, "acc_stderr": 0.029778663037752954, "acc_norm": 0.21761658031088082, "acc_norm_stderr": 0.029778663037752954 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.258974358974359, "acc_stderr": 0.02221110681006166, "acc_norm": 0.258974358974359, "acc_norm_stderr": 0.02221110681006166 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.2777777777777778, "acc_stderr": 0.027309140588230182, "acc_norm": 0.2777777777777778, "acc_norm_stderr": 0.027309140588230182 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.19327731092436976, "acc_stderr": 0.025649470265889193, "acc_norm": 0.19327731092436976, "acc_norm_stderr": 0.025649470265889193 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2781456953642384, 
"acc_stderr": 0.03658603262763743, "acc_norm": 0.2781456953642384, "acc_norm_stderr": 0.03658603262763743 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.23853211009174313, "acc_stderr": 0.01827257581023186, "acc_norm": 0.23853211009174313, "acc_norm_stderr": 0.01827257581023186 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.22685185185185186, "acc_stderr": 0.02856165010242227, "acc_norm": 0.22685185185185186, "acc_norm_stderr": 0.02856165010242227 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.2549019607843137, "acc_stderr": 0.030587591351604243, "acc_norm": 0.2549019607843137, "acc_norm_stderr": 0.030587591351604243 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.21518987341772153, "acc_stderr": 0.026750826994676187, "acc_norm": 0.21518987341772153, "acc_norm_stderr": 0.026750826994676187 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.18834080717488788, "acc_stderr": 0.026241132996407256, "acc_norm": 0.18834080717488788, "acc_norm_stderr": 0.026241132996407256 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.20610687022900764, "acc_stderr": 0.035477710041594654, "acc_norm": 0.20610687022900764, "acc_norm_stderr": 0.035477710041594654 }, "harness|hendrycksTest-international_law|5": { "acc": 0.3884297520661157, "acc_stderr": 0.04449270350068382, "acc_norm": 0.3884297520661157, "acc_norm_stderr": 0.04449270350068382 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.28703703703703703, "acc_stderr": 0.043733130409147614, "acc_norm": 0.28703703703703703, "acc_norm_stderr": 0.043733130409147614 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2883435582822086, "acc_stderr": 0.03559039531617342, "acc_norm": 0.2883435582822086, "acc_norm_stderr": 0.03559039531617342 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467762, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467762 }, "harness|hendrycksTest-management|5": { "acc": 0.2524271844660194, "acc_stderr": 0.04301250399690878, "acc_norm": 0.2524271844660194, "acc_norm_stderr": 0.04301250399690878 }, "harness|hendrycksTest-marketing|5": { "acc": 0.26495726495726496, "acc_stderr": 0.028911208802749465, "acc_norm": 0.26495726495726496, "acc_norm_stderr": 0.028911208802749465 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.18, "acc_stderr": 0.038612291966536955, "acc_norm": 0.18, "acc_norm_stderr": 0.038612291966536955 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.2950191570881226, "acc_stderr": 0.01630836377293272, "acc_norm": 0.2950191570881226, "acc_norm_stderr": 0.01630836377293272 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.2774566473988439, "acc_stderr": 0.024105712607754307, "acc_norm": 0.2774566473988439, "acc_norm_stderr": 0.024105712607754307 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.27262569832402234, "acc_stderr": 0.014893391735249588, "acc_norm": 0.27262569832402234, "acc_norm_stderr": 0.014893391735249588 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.24183006535947713, "acc_stderr": 0.024518195641879334, "acc_norm": 0.24183006535947713, "acc_norm_stderr": 0.024518195641879334 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.2990353697749196, "acc_stderr": 0.026003301117885135, "acc_norm": 0.2990353697749196, "acc_norm_stderr": 0.026003301117885135 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.25617283950617287, "acc_stderr": 0.0242885336377261, "acc_norm": 0.25617283950617287, "acc_norm_stderr": 
0.0242885336377261 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.2801418439716312, "acc_stderr": 0.026789172351140235, "acc_norm": 0.2801418439716312, "acc_norm_stderr": 0.026789172351140235 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.24445893089960888, "acc_stderr": 0.01097642501311389, "acc_norm": 0.24445893089960888, "acc_norm_stderr": 0.01097642501311389 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.18382352941176472, "acc_stderr": 0.02352924218519311, "acc_norm": 0.18382352941176472, "acc_norm_stderr": 0.02352924218519311 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.26633986928104575, "acc_stderr": 0.0178831881346672, "acc_norm": 0.26633986928104575, "acc_norm_stderr": 0.0178831881346672 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.2545454545454545, "acc_stderr": 0.04172343038705383, "acc_norm": 0.2545454545454545, "acc_norm_stderr": 0.04172343038705383 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.2857142857142857, "acc_stderr": 0.028920583220675585, "acc_norm": 0.2857142857142857, "acc_norm_stderr": 0.028920583220675585 }, "harness|hendrycksTest-sociology|5": { "acc": 0.22885572139303484, "acc_stderr": 0.029705284056772432, "acc_norm": 0.22885572139303484, "acc_norm_stderr": 0.029705284056772432 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-virology|5": { "acc": 0.2710843373493976, "acc_stderr": 0.034605799075530255, "acc_norm": 0.2710843373493976, "acc_norm_stderr": 0.034605799075530255 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.27485380116959063, "acc_stderr": 0.034240429246915824, "acc_norm": 0.27485380116959063, "acc_norm_stderr": 0.034240429246915824 }, "harness|truthfulqa:mc|0": { "mc1": 0.26805385556915545, "mc1_stderr": 0.015506204722834557, "mc2": 0.4111139604848093, "mc2_stderr": 0.014893834532788594 }, "harness|winogrande|5": { "acc": 0.6211523283346487, "acc_stderr": 0.01363372460318033 }, "harness|gsm8k|5": { "acc": 0.008339651250947688, "acc_stderr": 0.0025049422268605234 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
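For completeness, a small sketch of how the aggregated results file linked in the card above could be fetched and inspected; the repo id and filename are taken from the card, while the exact key layout of the JSON is not assumed:

```python
import json
from huggingface_hub import hf_hub_download

# Download the aggregated results JSON referenced in the "Latest results" section.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_xaviviro__FLAMA-0.5-3B",
    filename="results_2024-01-04T14-14-38.357079.json",
    repo_type="dataset",
)
with open(path) as f:
    results = json.load(f)

# Inspect the top-level keys rather than assuming a fixed schema.
print(list(results.keys()))
```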
open-llm-leaderboard/details_xaviviro__FLAMA-0.5-3B
[ "region:us" ]
2024-01-04T14:16:21+00:00
{"pretty_name": "Evaluation run of xaviviro/FLAMA-0.5-3B", "dataset_summary": "Dataset automatically created during the evaluation run of model [xaviviro/FLAMA-0.5-3B](https://huggingface.co/xaviviro/FLAMA-0.5-3B) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_xaviviro__FLAMA-0.5-3B\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T14:14:38.357079](https://huggingface.co/datasets/open-llm-leaderboard/details_xaviviro__FLAMA-0.5-3B/blob/main/results_2024-01-04T14-14-38.357079.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2647131817157082,\n \"acc_stderr\": 0.031070487329395803,\n \"acc_norm\": 0.2664900529641518,\n \"acc_norm_stderr\": 0.03184837841149458,\n \"mc1\": 0.26805385556915545,\n \"mc1_stderr\": 0.015506204722834557,\n \"mc2\": 0.4111139604848093,\n \"mc2_stderr\": 0.014893834532788594\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.3532423208191126,\n \"acc_stderr\": 0.013967822714840055,\n \"acc_norm\": 0.3796928327645051,\n \"acc_norm_stderr\": 0.014182119866974874\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4980083648675563,\n \"acc_stderr\": 0.004989741826250387,\n \"acc_norm\": 0.676458872734515,\n \"acc_norm_stderr\": 0.00466871068919241\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.2740740740740741,\n \"acc_stderr\": 0.03853254836552003,\n \"acc_norm\": 0.2740740740740741,\n \"acc_norm_stderr\": 0.03853254836552003\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.2565789473684211,\n \"acc_stderr\": 0.035541803680256896,\n \"acc_norm\": 0.2565789473684211,\n \"acc_norm_stderr\": 0.035541803680256896\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.22,\n \"acc_stderr\": 0.04163331998932269,\n \"acc_norm\": 0.22,\n \"acc_norm_stderr\": 0.04163331998932269\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.2830188679245283,\n \"acc_stderr\": 0.027724236492700904,\n \"acc_norm\": 0.2830188679245283,\n \"acc_norm_stderr\": 0.027724236492700904\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2569444444444444,\n \"acc_stderr\": 0.03653946969442099,\n \"acc_norm\": 0.2569444444444444,\n \"acc_norm_stderr\": 0.03653946969442099\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.24,\n \"acc_stderr\": 0.04292346959909284,\n 
\"acc_norm\": 0.24,\n \"acc_norm_stderr\": 0.04292346959909284\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.0440844002276808,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.0440844002276808\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.2254335260115607,\n \"acc_stderr\": 0.03186209851641144,\n \"acc_norm\": 0.2254335260115607,\n \"acc_norm_stderr\": 0.03186209851641144\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.042801058373643966,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.042801058373643966\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.32,\n \"acc_stderr\": 0.046882617226215034,\n \"acc_norm\": 0.32,\n \"acc_norm_stderr\": 0.046882617226215034\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.22127659574468084,\n \"acc_stderr\": 0.027136349602424056,\n \"acc_norm\": 0.22127659574468084,\n \"acc_norm_stderr\": 0.027136349602424056\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2543859649122807,\n \"acc_stderr\": 0.04096985139843673,\n \"acc_norm\": 0.2543859649122807,\n \"acc_norm_stderr\": 0.04096985139843673\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.2482758620689655,\n \"acc_stderr\": 0.03600105692727771,\n \"acc_norm\": 0.2482758620689655,\n \"acc_norm_stderr\": 0.03600105692727771\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2698412698412698,\n \"acc_stderr\": 0.02286083830923207,\n \"acc_norm\": 0.2698412698412698,\n \"acc_norm_stderr\": 0.02286083830923207\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.20634920634920634,\n \"acc_stderr\": 0.03619604524124251,\n \"acc_norm\": 0.20634920634920634,\n \"acc_norm_stderr\": 0.03619604524124251\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.23870967741935484,\n \"acc_stderr\": 0.02425107126220884,\n \"acc_norm\": 0.23870967741935484,\n \"acc_norm_stderr\": 0.02425107126220884\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2660098522167488,\n \"acc_stderr\": 0.03108982600293753,\n \"acc_norm\": 0.2660098522167488,\n \"acc_norm_stderr\": 0.03108982600293753\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2606060606060606,\n \"acc_stderr\": 0.034277431758165236,\n \"acc_norm\": 0.2606060606060606,\n \"acc_norm_stderr\": 0.034277431758165236\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.3434343434343434,\n \"acc_stderr\": 0.033832012232444426,\n \"acc_norm\": 0.3434343434343434,\n \"acc_norm_stderr\": 0.033832012232444426\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.21761658031088082,\n \"acc_stderr\": 0.029778663037752954,\n \"acc_norm\": 0.21761658031088082,\n \"acc_norm_stderr\": 0.029778663037752954\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n 
\"acc\": 0.258974358974359,\n \"acc_stderr\": 0.02221110681006166,\n \"acc_norm\": 0.258974358974359,\n \"acc_norm_stderr\": 0.02221110681006166\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.2777777777777778,\n \"acc_stderr\": 0.027309140588230182,\n \"acc_norm\": 0.2777777777777778,\n \"acc_norm_stderr\": 0.027309140588230182\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.19327731092436976,\n \"acc_stderr\": 0.025649470265889193,\n \"acc_norm\": 0.19327731092436976,\n \"acc_norm_stderr\": 0.025649470265889193\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2781456953642384,\n \"acc_stderr\": 0.03658603262763743,\n \"acc_norm\": 0.2781456953642384,\n \"acc_norm_stderr\": 0.03658603262763743\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.23853211009174313,\n \"acc_stderr\": 0.01827257581023186,\n \"acc_norm\": 0.23853211009174313,\n \"acc_norm_stderr\": 0.01827257581023186\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.22685185185185186,\n \"acc_stderr\": 0.02856165010242227,\n \"acc_norm\": 0.22685185185185186,\n \"acc_norm_stderr\": 0.02856165010242227\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.2549019607843137,\n \"acc_stderr\": 0.030587591351604243,\n \"acc_norm\": 0.2549019607843137,\n \"acc_norm_stderr\": 0.030587591351604243\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.21518987341772153,\n \"acc_stderr\": 0.026750826994676187,\n \"acc_norm\": 0.21518987341772153,\n \"acc_norm_stderr\": 0.026750826994676187\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.18834080717488788,\n \"acc_stderr\": 0.026241132996407256,\n \"acc_norm\": 0.18834080717488788,\n \"acc_norm_stderr\": 0.026241132996407256\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.20610687022900764,\n \"acc_stderr\": 0.035477710041594654,\n \"acc_norm\": 0.20610687022900764,\n \"acc_norm_stderr\": 0.035477710041594654\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.3884297520661157,\n \"acc_stderr\": 0.04449270350068382,\n \"acc_norm\": 0.3884297520661157,\n \"acc_norm_stderr\": 0.04449270350068382\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.28703703703703703,\n \"acc_stderr\": 0.043733130409147614,\n \"acc_norm\": 0.28703703703703703,\n \"acc_norm_stderr\": 0.043733130409147614\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2883435582822086,\n \"acc_stderr\": 0.03559039531617342,\n \"acc_norm\": 0.2883435582822086,\n \"acc_norm_stderr\": 0.03559039531617342\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.26785714285714285,\n \"acc_stderr\": 0.04203277291467762,\n \"acc_norm\": 0.26785714285714285,\n \"acc_norm_stderr\": 0.04203277291467762\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.2524271844660194,\n \"acc_stderr\": 0.04301250399690878,\n \"acc_norm\": 0.2524271844660194,\n \"acc_norm_stderr\": 0.04301250399690878\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.26495726495726496,\n \"acc_stderr\": 0.028911208802749465,\n \"acc_norm\": 0.26495726495726496,\n \"acc_norm_stderr\": 0.028911208802749465\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.038612291966536955,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.038612291966536955\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.2950191570881226,\n \"acc_stderr\": 
0.01630836377293272,\n \"acc_norm\": 0.2950191570881226,\n \"acc_norm_stderr\": 0.01630836377293272\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.2774566473988439,\n \"acc_stderr\": 0.024105712607754307,\n \"acc_norm\": 0.2774566473988439,\n \"acc_norm_stderr\": 0.024105712607754307\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.27262569832402234,\n \"acc_stderr\": 0.014893391735249588,\n \"acc_norm\": 0.27262569832402234,\n \"acc_norm_stderr\": 0.014893391735249588\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.24183006535947713,\n \"acc_stderr\": 0.024518195641879334,\n \"acc_norm\": 0.24183006535947713,\n \"acc_norm_stderr\": 0.024518195641879334\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.2990353697749196,\n \"acc_stderr\": 0.026003301117885135,\n \"acc_norm\": 0.2990353697749196,\n \"acc_norm_stderr\": 0.026003301117885135\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.25617283950617287,\n \"acc_stderr\": 0.0242885336377261,\n \"acc_norm\": 0.25617283950617287,\n \"acc_norm_stderr\": 0.0242885336377261\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2801418439716312,\n \"acc_stderr\": 0.026789172351140235,\n \"acc_norm\": 0.2801418439716312,\n \"acc_norm_stderr\": 0.026789172351140235\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.24445893089960888,\n \"acc_stderr\": 0.01097642501311389,\n \"acc_norm\": 0.24445893089960888,\n \"acc_norm_stderr\": 0.01097642501311389\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.18382352941176472,\n \"acc_stderr\": 0.02352924218519311,\n \"acc_norm\": 0.18382352941176472,\n \"acc_norm_stderr\": 0.02352924218519311\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.26633986928104575,\n \"acc_stderr\": 0.0178831881346672,\n \"acc_norm\": 0.26633986928104575,\n \"acc_norm_stderr\": 0.0178831881346672\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.2545454545454545,\n \"acc_stderr\": 0.04172343038705383,\n \"acc_norm\": 0.2545454545454545,\n \"acc_norm_stderr\": 0.04172343038705383\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.2857142857142857,\n \"acc_stderr\": 0.028920583220675585,\n \"acc_norm\": 0.2857142857142857,\n \"acc_norm_stderr\": 0.028920583220675585\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.22885572139303484,\n \"acc_stderr\": 0.029705284056772432,\n \"acc_norm\": 0.22885572139303484,\n \"acc_norm_stderr\": 0.029705284056772432\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.2710843373493976,\n \"acc_stderr\": 0.034605799075530255,\n \"acc_norm\": 0.2710843373493976,\n \"acc_norm_stderr\": 0.034605799075530255\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.27485380116959063,\n \"acc_stderr\": 0.034240429246915824,\n \"acc_norm\": 0.27485380116959063,\n \"acc_norm_stderr\": 0.034240429246915824\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.26805385556915545,\n \"mc1_stderr\": 0.015506204722834557,\n \"mc2\": 0.4111139604848093,\n \"mc2_stderr\": 0.014893834532788594\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6211523283346487,\n \"acc_stderr\": 0.01363372460318033\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.008339651250947688,\n \"acc_stderr\": 0.0025049422268605234\n }\n}\n```", 
"repo_url": "https://huggingface.co/xaviviro/FLAMA-0.5-3B", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-14-38.357079.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-14-38.357079.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-14-38.357079.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-14-38.357079.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-14-38.357079.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T14_14_38.357079", "path": ["**/details_harness|winogrande|5_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T14-14-38.357079.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T14_14_38.357079", "path": ["results_2024-01-04T14-14-38.357079.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T14-14-38.357079.parquet"]}]}]}
2024-01-04T14:16:48+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of xaviviro/FLAMA-0.5-3B Dataset automatically created during the evaluation run of model xaviviro/FLAMA-0.5-3B on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T14:14:38.357079 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases, and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
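The processed card text above ends its loading instructions at "you can for instance do the following:" because the fenced snippet appears to have been stripped during text extraction. The sketch below is a hedged reconstruction, not the original snippet: the details-repository name is an assumption inferred from the leaderboard's usual `details_<org>__<model>` naming pattern, while the config name and "latest" split are taken from the config listing in this record.

```python
from datasets import load_dataset

# Hedged reconstruction of the stripped loading example.
# The repository name is assumed from the "details_<org>__<model>" pattern;
# "harness_winogrande_5" and the "latest" split both appear in this record's
# config listing above.
data = load_dataset(
    "open-llm-leaderboard/details_xaviviro__FLAMA-0.5-3B",
    "harness_winogrande_5",
    split="latest",
)
print(data)
```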
[ "# Dataset Card for Evaluation run of xaviviro/FLAMA-0.5-3B\n\n\n\nDataset automatically created during the evaluation run of model xaviviro/FLAMA-0.5-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:14:38.357079(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of xaviviro/FLAMA-0.5-3B\n\n\n\nDataset automatically created during the evaluation run of model xaviviro/FLAMA-0.5-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:14:38.357079(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 181, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of xaviviro/FLAMA-0.5-3B\n\n\n\nDataset automatically created during the evaluation run of model xaviviro/FLAMA-0.5-3B on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T14:14:38.357079(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
99e205eed106fd9cb209c6273fe1d51608950996
# Dataset Card for Evaluation run of jae24/openhermes_dpo_norobot_0201 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [jae24/openhermes_dpo_norobot_0201](https://huggingface.co/jae24/openhermes_dpo_norobot_0201) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_jae24__openhermes_dpo_norobot_0201", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T14:15:33.723990](https://huggingface.co/datasets/open-llm-leaderboard/details_jae24__openhermes_dpo_norobot_0201/blob/main/results_2024-01-04T14-15-33.723990.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6230441962103045, "acc_stderr": 0.0325156991551045, "acc_norm": 0.6274562240705078, "acc_norm_stderr": 0.033162684621809393, "mc1": 0.2913096695226438, "mc1_stderr": 0.01590598704818483, "mc2": 0.474388925160649, "mc2_stderr": 0.014635683515771682 }, "harness|arc:challenge|25": { "acc": 0.5597269624573379, "acc_stderr": 0.014506769524804237, "acc_norm": 0.6203071672354948, "acc_norm_stderr": 0.01418211986697487 }, "harness|hellaswag|10": { "acc": 0.6061541525592511, "acc_stderr": 0.004876028037941937, "acc_norm": 0.8339972117108145, "acc_norm_stderr": 0.003713227064225387 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5703703703703704, "acc_stderr": 0.042763494943765995, "acc_norm": 0.5703703703703704, "acc_norm_stderr": 0.042763494943765995 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.6644736842105263, "acc_stderr": 0.038424985593952694, "acc_norm": 0.6644736842105263, "acc_norm_stderr": 0.038424985593952694 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.56, "acc_stderr": 0.04988876515698589, "acc_norm": 0.56, "acc_norm_stderr": 0.04988876515698589 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.660377358490566, "acc_stderr": 0.02914690474779833, "acc_norm": 0.660377358490566, "acc_norm_stderr": 0.02914690474779833 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7361111111111112, "acc_stderr": 0.03685651095897532, "acc_norm": 0.7361111111111112, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, 
"acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6127167630057804, "acc_stderr": 0.03714325906302065, "acc_norm": 0.6127167630057804, "acc_norm_stderr": 0.03714325906302065 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.38235294117647056, "acc_stderr": 0.04835503696107224, "acc_norm": 0.38235294117647056, "acc_norm_stderr": 0.04835503696107224 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.74, "acc_stderr": 0.044084400227680794, "acc_norm": 0.74, "acc_norm_stderr": 0.044084400227680794 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5404255319148936, "acc_stderr": 0.03257901482099835, "acc_norm": 0.5404255319148936, "acc_norm_stderr": 0.03257901482099835 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.4473684210526316, "acc_stderr": 0.046774730044911984, "acc_norm": 0.4473684210526316, "acc_norm_stderr": 0.046774730044911984 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.4896551724137931, "acc_stderr": 0.04165774775728763, "acc_norm": 0.4896551724137931, "acc_norm_stderr": 0.04165774775728763 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.41798941798941797, "acc_stderr": 0.025402555503260912, "acc_norm": 0.41798941798941797, "acc_norm_stderr": 0.025402555503260912 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4444444444444444, "acc_stderr": 0.04444444444444449, "acc_norm": 0.4444444444444444, "acc_norm_stderr": 0.04444444444444449 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145633, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145633 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7612903225806451, "acc_stderr": 0.024251071262208837, "acc_norm": 0.7612903225806451, "acc_norm_stderr": 0.024251071262208837 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5517241379310345, "acc_stderr": 0.034991131376767445, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.034991131376767445 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542127, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542127 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7515151515151515, "acc_stderr": 0.033744026441394036, "acc_norm": 0.7515151515151515, "acc_norm_stderr": 0.033744026441394036 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7777777777777778, "acc_stderr": 0.02962022787479049, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.02962022787479049 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8549222797927462, "acc_stderr": 0.025416343096306422, "acc_norm": 0.8549222797927462, "acc_norm_stderr": 0.025416343096306422 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6051282051282051, "acc_stderr": 0.024784316942156402, "acc_norm": 0.6051282051282051, "acc_norm_stderr": 0.024784316942156402 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948485, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948485 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6512605042016807, "acc_stderr": 0.030956636328566545, "acc_norm": 0.6512605042016807, "acc_norm_stderr": 0.030956636328566545 }, "harness|hendrycksTest-high_school_physics|5": { 
"acc": 0.33774834437086093, "acc_stderr": 0.038615575462551684, "acc_norm": 0.33774834437086093, "acc_norm_stderr": 0.038615575462551684 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8311926605504587, "acc_stderr": 0.01606005626853034, "acc_norm": 0.8311926605504587, "acc_norm_stderr": 0.01606005626853034 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.48148148148148145, "acc_stderr": 0.03407632093854052, "acc_norm": 0.48148148148148145, "acc_norm_stderr": 0.03407632093854052 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7892156862745098, "acc_stderr": 0.02862654791243741, "acc_norm": 0.7892156862745098, "acc_norm_stderr": 0.02862654791243741 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7679324894514767, "acc_stderr": 0.02747974455080851, "acc_norm": 0.7679324894514767, "acc_norm_stderr": 0.02747974455080851 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.031381476375754995, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.031381476375754995 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7709923664122137, "acc_stderr": 0.036853466317118506, "acc_norm": 0.7709923664122137, "acc_norm_stderr": 0.036853466317118506 }, "harness|hendrycksTest-international_law|5": { "acc": 0.768595041322314, "acc_stderr": 0.03849856098794088, "acc_norm": 0.768595041322314, "acc_norm_stderr": 0.03849856098794088 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7685185185185185, "acc_stderr": 0.04077494709252626, "acc_norm": 0.7685185185185185, "acc_norm_stderr": 0.04077494709252626 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7852760736196319, "acc_stderr": 0.032262193772867744, "acc_norm": 0.7852760736196319, "acc_norm_stderr": 0.032262193772867744 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.5089285714285714, "acc_stderr": 0.04745033255489123, "acc_norm": 0.5089285714285714, "acc_norm_stderr": 0.04745033255489123 }, "harness|hendrycksTest-management|5": { "acc": 0.8155339805825242, "acc_stderr": 0.03840423627288276, "acc_norm": 0.8155339805825242, "acc_norm_stderr": 0.03840423627288276 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8376068376068376, "acc_stderr": 0.024161618127987745, "acc_norm": 0.8376068376068376, "acc_norm_stderr": 0.024161618127987745 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8148148148148148, "acc_stderr": 0.013890862162876168, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.013890862162876168 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7052023121387283, "acc_stderr": 0.024547617794803828, "acc_norm": 0.7052023121387283, "acc_norm_stderr": 0.024547617794803828 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.25139664804469275, "acc_stderr": 0.014508979453553984, "acc_norm": 0.25139664804469275, "acc_norm_stderr": 0.014508979453553984 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7352941176470589, "acc_stderr": 0.02526169121972948, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.02526169121972948 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6816720257234726, "acc_stderr": 0.026457225067811025, "acc_norm": 0.6816720257234726, "acc_norm_stderr": 0.026457225067811025 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7345679012345679, "acc_stderr": 0.024569223600460845, "acc_norm": 0.7345679012345679, "acc_norm_stderr": 
0.024569223600460845 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4574468085106383, "acc_stderr": 0.029719281272236837, "acc_norm": 0.4574468085106383, "acc_norm_stderr": 0.029719281272236837 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4595827900912647, "acc_stderr": 0.012728446067669975, "acc_norm": 0.4595827900912647, "acc_norm_stderr": 0.012728446067669975 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.6544117647058824, "acc_stderr": 0.02888819310398863, "acc_norm": 0.6544117647058824, "acc_norm_stderr": 0.02888819310398863 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6699346405228758, "acc_stderr": 0.019023726160724556, "acc_norm": 0.6699346405228758, "acc_norm_stderr": 0.019023726160724556 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6545454545454545, "acc_stderr": 0.04554619617541054, "acc_norm": 0.6545454545454545, "acc_norm_stderr": 0.04554619617541054 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.6979591836734694, "acc_stderr": 0.0293936093198798, "acc_norm": 0.6979591836734694, "acc_norm_stderr": 0.0293936093198798 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8109452736318408, "acc_stderr": 0.027686913588013024, "acc_norm": 0.8109452736318408, "acc_norm_stderr": 0.027686913588013024 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.034873508801977704, "acc_norm": 0.86, "acc_norm_stderr": 0.034873508801977704 }, "harness|hendrycksTest-virology|5": { "acc": 0.5481927710843374, "acc_stderr": 0.03874371556587953, "acc_norm": 0.5481927710843374, "acc_norm_stderr": 0.03874371556587953 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8362573099415205, "acc_stderr": 0.028380919596145866, "acc_norm": 0.8362573099415205, "acc_norm_stderr": 0.028380919596145866 }, "harness|truthfulqa:mc|0": { "mc1": 0.2913096695226438, "mc1_stderr": 0.01590598704818483, "mc2": 0.474388925160649, "mc2_stderr": 0.014635683515771682 }, "harness|winogrande|5": { "acc": 0.7821625887924231, "acc_stderr": 0.011601066079939324 }, "harness|gsm8k|5": { "acc": 0.4920394238059136, "acc_stderr": 0.01377073906313537 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
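Beyond the single-task snippet shown in the card above, one might also want the aggregated metrics for this run. The sketch below is illustrative and not part of the original card; the "results" config name and the "latest" split mirror the config listings these evaluation repositories publish (see the analogous listing in the previous record), so they should be verified against the repository before use.

```python
from datasets import load_dataset

# Illustrative only: load the aggregated "results" configuration for this run.
# The "latest" split points at the most recent evaluation timestamp.
results = load_dataset(
    "open-llm-leaderboard/details_jae24__openhermes_dpo_norobot_0201",
    "results",
    split="latest",
)
print(results[0])
```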
open-llm-leaderboard/details_jae24__openhermes_dpo_norobot_0201
[ "region:us" ]
2024-01-04T14:17:53+00:00
{"pretty_name": "Evaluation run of jae24/openhermes_dpo_norobot_0201", "dataset_summary": "Dataset automatically created during the evaluation run of model [jae24/openhermes_dpo_norobot_0201](https://huggingface.co/jae24/openhermes_dpo_norobot_0201) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_jae24__openhermes_dpo_norobot_0201\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T14:15:33.723990](https://huggingface.co/datasets/open-llm-leaderboard/details_jae24__openhermes_dpo_norobot_0201/blob/main/results_2024-01-04T14-15-33.723990.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6230441962103045,\n \"acc_stderr\": 0.0325156991551045,\n \"acc_norm\": 0.6274562240705078,\n \"acc_norm_stderr\": 0.033162684621809393,\n \"mc1\": 0.2913096695226438,\n \"mc1_stderr\": 0.01590598704818483,\n \"mc2\": 0.474388925160649,\n \"mc2_stderr\": 0.014635683515771682\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5597269624573379,\n \"acc_stderr\": 0.014506769524804237,\n \"acc_norm\": 0.6203071672354948,\n \"acc_norm_stderr\": 0.01418211986697487\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6061541525592511,\n \"acc_stderr\": 0.004876028037941937,\n \"acc_norm\": 0.8339972117108145,\n \"acc_norm_stderr\": 0.003713227064225387\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5703703703703704,\n \"acc_stderr\": 0.042763494943765995,\n \"acc_norm\": 0.5703703703703704,\n \"acc_norm_stderr\": 0.042763494943765995\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.6644736842105263,\n \"acc_stderr\": 0.038424985593952694,\n \"acc_norm\": 0.6644736842105263,\n \"acc_norm_stderr\": 0.038424985593952694\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.56,\n \"acc_stderr\": 0.04988876515698589,\n \"acc_norm\": 0.56,\n \"acc_norm_stderr\": 0.04988876515698589\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.660377358490566,\n \"acc_stderr\": 0.02914690474779833,\n \"acc_norm\": 0.660377358490566,\n \"acc_norm_stderr\": 0.02914690474779833\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7361111111111112,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.7361111111111112,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n 
\"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6127167630057804,\n \"acc_stderr\": 0.03714325906302065,\n \"acc_norm\": 0.6127167630057804,\n \"acc_norm_stderr\": 0.03714325906302065\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.38235294117647056,\n \"acc_stderr\": 0.04835503696107224,\n \"acc_norm\": 0.38235294117647056,\n \"acc_norm_stderr\": 0.04835503696107224\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.74,\n \"acc_stderr\": 0.044084400227680794,\n \"acc_norm\": 0.74,\n \"acc_norm_stderr\": 0.044084400227680794\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5404255319148936,\n \"acc_stderr\": 0.03257901482099835,\n \"acc_norm\": 0.5404255319148936,\n \"acc_norm_stderr\": 0.03257901482099835\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.4473684210526316,\n \"acc_stderr\": 0.046774730044911984,\n \"acc_norm\": 0.4473684210526316,\n \"acc_norm_stderr\": 0.046774730044911984\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.4896551724137931,\n \"acc_stderr\": 0.04165774775728763,\n \"acc_norm\": 0.4896551724137931,\n \"acc_norm_stderr\": 0.04165774775728763\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.41798941798941797,\n \"acc_stderr\": 0.025402555503260912,\n \"acc_norm\": 0.41798941798941797,\n \"acc_norm_stderr\": 0.025402555503260912\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4444444444444444,\n \"acc_stderr\": 0.04444444444444449,\n \"acc_norm\": 0.4444444444444444,\n \"acc_norm_stderr\": 0.04444444444444449\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145633,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145633\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7612903225806451,\n \"acc_stderr\": 0.024251071262208837,\n \"acc_norm\": 0.7612903225806451,\n \"acc_norm_stderr\": 0.024251071262208837\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.034991131376767445,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.034991131376767445\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542127,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542127\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7515151515151515,\n \"acc_stderr\": 0.033744026441394036,\n \"acc_norm\": 0.7515151515151515,\n \"acc_norm_stderr\": 0.033744026441394036\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.02962022787479049,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.02962022787479049\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8549222797927462,\n \"acc_stderr\": 0.025416343096306422,\n \"acc_norm\": 0.8549222797927462,\n \"acc_norm_stderr\": 0.025416343096306422\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6051282051282051,\n \"acc_stderr\": 0.024784316942156402,\n \"acc_norm\": 0.6051282051282051,\n \"acc_norm_stderr\": 0.024784316942156402\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948485,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948485\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6512605042016807,\n \"acc_stderr\": 0.030956636328566545,\n \"acc_norm\": 0.6512605042016807,\n \"acc_norm_stderr\": 0.030956636328566545\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33774834437086093,\n \"acc_stderr\": 0.038615575462551684,\n \"acc_norm\": 0.33774834437086093,\n \"acc_norm_stderr\": 0.038615575462551684\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8311926605504587,\n \"acc_stderr\": 0.01606005626853034,\n \"acc_norm\": 0.8311926605504587,\n \"acc_norm_stderr\": 0.01606005626853034\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.48148148148148145,\n \"acc_stderr\": 0.03407632093854052,\n \"acc_norm\": 0.48148148148148145,\n \"acc_norm_stderr\": 0.03407632093854052\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7892156862745098,\n \"acc_stderr\": 0.02862654791243741,\n \"acc_norm\": 0.7892156862745098,\n \"acc_norm_stderr\": 0.02862654791243741\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7679324894514767,\n \"acc_stderr\": 0.02747974455080851,\n \"acc_norm\": 0.7679324894514767,\n \"acc_norm_stderr\": 0.02747974455080851\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.031381476375754995,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.031381476375754995\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7709923664122137,\n \"acc_stderr\": 0.036853466317118506,\n \"acc_norm\": 0.7709923664122137,\n \"acc_norm_stderr\": 0.036853466317118506\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.768595041322314,\n \"acc_stderr\": 0.03849856098794088,\n \"acc_norm\": 0.768595041322314,\n \"acc_norm_stderr\": 0.03849856098794088\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7685185185185185,\n \"acc_stderr\": 0.04077494709252626,\n \"acc_norm\": 0.7685185185185185,\n \"acc_norm_stderr\": 0.04077494709252626\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7852760736196319,\n \"acc_stderr\": 0.032262193772867744,\n \"acc_norm\": 0.7852760736196319,\n \"acc_norm_stderr\": 0.032262193772867744\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.5089285714285714,\n \"acc_stderr\": 0.04745033255489123,\n \"acc_norm\": 0.5089285714285714,\n \"acc_norm_stderr\": 0.04745033255489123\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8155339805825242,\n \"acc_stderr\": 0.03840423627288276,\n \"acc_norm\": 0.8155339805825242,\n \"acc_norm_stderr\": 0.03840423627288276\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8376068376068376,\n \"acc_stderr\": 0.024161618127987745,\n \"acc_norm\": 0.8376068376068376,\n \"acc_norm_stderr\": 0.024161618127987745\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.8148148148148148,\n \"acc_stderr\": 0.013890862162876168,\n \"acc_norm\": 0.8148148148148148,\n \"acc_norm_stderr\": 0.013890862162876168\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7052023121387283,\n \"acc_stderr\": 0.024547617794803828,\n \"acc_norm\": 0.7052023121387283,\n \"acc_norm_stderr\": 0.024547617794803828\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.25139664804469275,\n \"acc_stderr\": 0.014508979453553984,\n \"acc_norm\": 0.25139664804469275,\n \"acc_norm_stderr\": 0.014508979453553984\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.02526169121972948,\n \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.02526169121972948\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6816720257234726,\n \"acc_stderr\": 0.026457225067811025,\n \"acc_norm\": 0.6816720257234726,\n \"acc_norm_stderr\": 0.026457225067811025\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7345679012345679,\n \"acc_stderr\": 0.024569223600460845,\n \"acc_norm\": 0.7345679012345679,\n \"acc_norm_stderr\": 0.024569223600460845\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4574468085106383,\n \"acc_stderr\": 0.029719281272236837,\n \"acc_norm\": 0.4574468085106383,\n \"acc_norm_stderr\": 0.029719281272236837\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4595827900912647,\n \"acc_stderr\": 0.012728446067669975,\n \"acc_norm\": 0.4595827900912647,\n \"acc_norm_stderr\": 0.012728446067669975\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.6544117647058824,\n \"acc_stderr\": 0.02888819310398863,\n \"acc_norm\": 0.6544117647058824,\n \"acc_norm_stderr\": 0.02888819310398863\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6699346405228758,\n \"acc_stderr\": 0.019023726160724556,\n \"acc_norm\": 0.6699346405228758,\n \"acc_norm_stderr\": 0.019023726160724556\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6545454545454545,\n \"acc_stderr\": 0.04554619617541054,\n \"acc_norm\": 0.6545454545454545,\n \"acc_norm_stderr\": 0.04554619617541054\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.6979591836734694,\n \"acc_stderr\": 0.0293936093198798,\n \"acc_norm\": 0.6979591836734694,\n \"acc_norm_stderr\": 0.0293936093198798\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8109452736318408,\n \"acc_stderr\": 0.027686913588013024,\n \"acc_norm\": 0.8109452736318408,\n \"acc_norm_stderr\": 0.027686913588013024\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.034873508801977704,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.034873508801977704\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5481927710843374,\n \"acc_stderr\": 0.03874371556587953,\n \"acc_norm\": 0.5481927710843374,\n \"acc_norm_stderr\": 0.03874371556587953\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8362573099415205,\n \"acc_stderr\": 0.028380919596145866,\n \"acc_norm\": 0.8362573099415205,\n \"acc_norm_stderr\": 0.028380919596145866\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2913096695226438,\n \"mc1_stderr\": 0.01590598704818483,\n \"mc2\": 0.474388925160649,\n \"mc2_stderr\": 0.014635683515771682\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7821625887924231,\n \"acc_stderr\": 0.011601066079939324\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.4920394238059136,\n \"acc_stderr\": 
0.01377073906313537\n }\n}\n```", "repo_url": "https://huggingface.co/jae24/openhermes_dpo_norobot_0201", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-15-33.723990.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-15-33.723990.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-15-33.723990.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-15-33.723990.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-15-33.723990.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T14_15_33.723990", "path": ["**/details_harness|winogrande|5_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T14-15-33.723990.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T14_15_33.723990", "path": ["results_2024-01-04T14-15-33.723990.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T14-15-33.723990.parquet"]}]}]}
2024-01-04T14:18:19+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of jae24/openhermes_dpo_norobot_0201 Dataset automatically created during the evaluation run of model jae24/openhermes_dpo_norobot_0201 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T14:15:33.723990 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each of them in the results and in the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
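The flattened card text above ends the sentence "To load the details from a run, you can for instance do the following:" without the code block that accompanies it in the rendered card; the snippet, reproduced here from the card metadata for convenience, loads the per-example details of one evaluated task:

```python
from datasets import load_dataset

# Per-example details for one evaluated task (config) of this run;
# the card notes that the "train" split always points at the latest results.
data = load_dataset(
    "open-llm-leaderboard/details_jae24__openhermes_dpo_norobot_0201",
    "harness_winogrande_5",
    split="train",
)
```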
[ "# Dataset Card for Evaluation run of jae24/openhermes_dpo_norobot_0201\n\n\n\nDataset automatically created during the evaluation run of model jae24/openhermes_dpo_norobot_0201 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:15:33.723990(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of jae24/openhermes_dpo_norobot_0201\n\n\n\nDataset automatically created during the evaluation run of model jae24/openhermes_dpo_norobot_0201 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:15:33.723990(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 193, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of jae24/openhermes_dpo_norobot_0201\n\n\n\nDataset automatically created during the evaluation run of model jae24/openhermes_dpo_norobot_0201 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T14:15:33.723990(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]" ]
3e1bbe00625903f6e117579b39bb9ca089b0d631
# Dataset Card for Evaluation run of EmbeddedLLM/Mistral-7B-Merge-14-v0.4 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [EmbeddedLLM/Mistral-7B-Merge-14-v0.4](https://huggingface.co/EmbeddedLLM/Mistral-7B-Merge-14-v0.4) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_EmbeddedLLM__Mistral-7B-Merge-14-v0.4", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T14:25:58.424291](https://huggingface.co/datasets/open-llm-leaderboard/details_EmbeddedLLM__Mistral-7B-Merge-14-v0.4/blob/main/results_2024-01-04T14-25-58.424291.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6546862329259969, "acc_stderr": 0.031867948580950975, "acc_norm": 0.6548303834645314, "acc_norm_stderr": 0.0325206187153387, "mc1": 0.40024479804161567, "mc1_stderr": 0.017151605555749138, "mc2": 0.5824837274596946, "mc2_stderr": 0.015539719241734074 }, "harness|arc:challenge|25": { "acc": 0.6390784982935154, "acc_stderr": 0.014034761386175456, "acc_norm": 0.6680887372013652, "acc_norm_stderr": 0.013760988200880538 }, "harness|hellaswag|10": { "acc": 0.6829316869149572, "acc_stderr": 0.0046438327428766435, "acc_norm": 0.8614817765385382, "acc_norm_stderr": 0.003447370972192067 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.29, "acc_stderr": 0.045604802157206845, "acc_norm": 0.29, "acc_norm_stderr": 0.045604802157206845 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6222222222222222, "acc_stderr": 0.04188307537595852, "acc_norm": 0.6222222222222222, "acc_norm_stderr": 0.04188307537595852 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7105263157894737, "acc_stderr": 0.03690677986137283, "acc_norm": 0.7105263157894737, "acc_norm_stderr": 0.03690677986137283 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.62, "acc_stderr": 0.048783173121456316, "acc_norm": 0.62, "acc_norm_stderr": 0.048783173121456316 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.7132075471698113, "acc_stderr": 0.027834912527544064, "acc_norm": 0.7132075471698113, "acc_norm_stderr": 0.027834912527544064 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7777777777777778, "acc_stderr": 0.03476590104304134, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.03476590104304134 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.47, "acc_stderr": 0.050161355804659205, "acc_norm": 0.47, "acc_norm_stderr": 0.050161355804659205 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.55, "acc_stderr": 0.05, "acc_norm": 0.55, 
"acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6820809248554913, "acc_stderr": 0.0355068398916558, "acc_norm": 0.6820809248554913, "acc_norm_stderr": 0.0355068398916558 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4215686274509804, "acc_stderr": 0.049135952012744975, "acc_norm": 0.4215686274509804, "acc_norm_stderr": 0.049135952012744975 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.04229525846816505, "acc_norm": 0.77, "acc_norm_stderr": 0.04229525846816505 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5659574468085107, "acc_stderr": 0.03240038086792747, "acc_norm": 0.5659574468085107, "acc_norm_stderr": 0.03240038086792747 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5087719298245614, "acc_stderr": 0.04702880432049615, "acc_norm": 0.5087719298245614, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5517241379310345, "acc_stderr": 0.04144311810878152, "acc_norm": 0.5517241379310345, "acc_norm_stderr": 0.04144311810878152 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.40476190476190477, "acc_stderr": 0.025279850397404904, "acc_norm": 0.40476190476190477, "acc_norm_stderr": 0.025279850397404904 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.47619047619047616, "acc_stderr": 0.04467062628403273, "acc_norm": 0.47619047619047616, "acc_norm_stderr": 0.04467062628403273 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7935483870967742, "acc_stderr": 0.023025899617188723, "acc_norm": 0.7935483870967742, "acc_norm_stderr": 0.023025899617188723 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.4975369458128079, "acc_stderr": 0.03517945038691063, "acc_norm": 0.4975369458128079, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542129, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542129 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7696969696969697, "acc_stderr": 0.0328766675860349, "acc_norm": 0.7696969696969697, "acc_norm_stderr": 0.0328766675860349 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7878787878787878, "acc_stderr": 0.029126522834586808, "acc_norm": 0.7878787878787878, "acc_norm_stderr": 0.029126522834586808 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.021500249576033484, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.021500249576033484 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6641025641025641, "acc_stderr": 0.023946724741563976, "acc_norm": 0.6641025641025641, "acc_norm_stderr": 0.023946724741563976 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32592592592592595, "acc_stderr": 0.02857834836547308, "acc_norm": 0.32592592592592595, "acc_norm_stderr": 0.02857834836547308 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6932773109243697, "acc_stderr": 0.029953823891887037, "acc_norm": 0.6932773109243697, "acc_norm_stderr": 0.029953823891887037 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.3443708609271523, 
"acc_stderr": 0.038796870240733264, "acc_norm": 0.3443708609271523, "acc_norm_stderr": 0.038796870240733264 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8440366972477065, "acc_stderr": 0.015555802713590175, "acc_norm": 0.8440366972477065, "acc_norm_stderr": 0.015555802713590175 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5138888888888888, "acc_stderr": 0.03408655867977749, "acc_norm": 0.5138888888888888, "acc_norm_stderr": 0.03408655867977749 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8382352941176471, "acc_stderr": 0.025845017986926917, "acc_norm": 0.8382352941176471, "acc_norm_stderr": 0.025845017986926917 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8227848101265823, "acc_stderr": 0.024856364184503228, "acc_norm": 0.8227848101265823, "acc_norm_stderr": 0.024856364184503228 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.695067264573991, "acc_stderr": 0.030898610882477515, "acc_norm": 0.695067264573991, "acc_norm_stderr": 0.030898610882477515 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7938931297709924, "acc_stderr": 0.03547771004159465, "acc_norm": 0.7938931297709924, "acc_norm_stderr": 0.03547771004159465 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8099173553719008, "acc_stderr": 0.03581796951709282, "acc_norm": 0.8099173553719008, "acc_norm_stderr": 0.03581796951709282 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8055555555555556, "acc_stderr": 0.038260763248848646, "acc_norm": 0.8055555555555556, "acc_norm_stderr": 0.038260763248848646 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7975460122699386, "acc_stderr": 0.031570650789119005, "acc_norm": 0.7975460122699386, "acc_norm_stderr": 0.031570650789119005 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4642857142857143, "acc_stderr": 0.04733667890053756, "acc_norm": 0.4642857142857143, "acc_norm_stderr": 0.04733667890053756 }, "harness|hendrycksTest-management|5": { "acc": 0.8349514563106796, "acc_stderr": 0.036756688322331886, "acc_norm": 0.8349514563106796, "acc_norm_stderr": 0.036756688322331886 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8504273504273504, "acc_stderr": 0.023365051491753715, "acc_norm": 0.8504273504273504, "acc_norm_stderr": 0.023365051491753715 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.72, "acc_stderr": 0.045126085985421276, "acc_norm": 0.72, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8339719029374202, "acc_stderr": 0.0133064782430663, "acc_norm": 0.8339719029374202, "acc_norm_stderr": 0.0133064782430663 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7514450867052023, "acc_stderr": 0.023267528432100174, "acc_norm": 0.7514450867052023, "acc_norm_stderr": 0.023267528432100174 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.394413407821229, "acc_stderr": 0.01634538676210397, "acc_norm": 0.394413407821229, "acc_norm_stderr": 0.01634538676210397 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7287581699346405, "acc_stderr": 0.02545775669666788, "acc_norm": 0.7287581699346405, "acc_norm_stderr": 0.02545775669666788 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7266881028938906, "acc_stderr": 0.02531176597542612, "acc_norm": 0.7266881028938906, "acc_norm_stderr": 0.02531176597542612 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7253086419753086, "acc_stderr": 0.024836057868294677, "acc_norm": 0.7253086419753086, "acc_norm_stderr": 0.024836057868294677 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.4858156028368794, "acc_stderr": 0.02981549448368206, "acc_norm": 0.4858156028368794, "acc_norm_stderr": 0.02981549448368206 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.47327249022164275, "acc_stderr": 0.012751977967676008, "acc_norm": 0.47327249022164275, "acc_norm_stderr": 0.012751977967676008 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7022058823529411, "acc_stderr": 0.02777829870154544, "acc_norm": 0.7022058823529411, "acc_norm_stderr": 0.02777829870154544 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6699346405228758, "acc_stderr": 0.019023726160724553, "acc_norm": 0.6699346405228758, "acc_norm_stderr": 0.019023726160724553 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6727272727272727, "acc_stderr": 0.0449429086625209, "acc_norm": 0.6727272727272727, "acc_norm_stderr": 0.0449429086625209 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7306122448979592, "acc_stderr": 0.02840125202902294, "acc_norm": 0.7306122448979592, "acc_norm_stderr": 0.02840125202902294 }, "harness|hendrycksTest-sociology|5": { "acc": 0.8407960199004975, "acc_stderr": 0.02587064676616913, "acc_norm": 0.8407960199004975, "acc_norm_stderr": 0.02587064676616913 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.86, "acc_stderr": 0.0348735088019777, "acc_norm": 0.86, "acc_norm_stderr": 0.0348735088019777 }, "harness|hendrycksTest-virology|5": { "acc": 0.5240963855421686, "acc_stderr": 0.03887971849597264, "acc_norm": 0.5240963855421686, "acc_norm_stderr": 0.03887971849597264 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8421052631578947, "acc_stderr": 0.02796678585916089, "acc_norm": 0.8421052631578947, "acc_norm_stderr": 0.02796678585916089 }, "harness|truthfulqa:mc|0": { "mc1": 0.40024479804161567, "mc1_stderr": 0.017151605555749138, "mc2": 0.5824837274596946, "mc2_stderr": 0.015539719241734074 }, "harness|winogrande|5": { "acc": 0.8003157063930545, "acc_stderr": 0.011235328382625849 }, "harness|gsm8k|5": { "acc": 0.7081122062168309, "acc_stderr": 0.012522795894420869 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
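As a quick complement to the per-task scores above, the snippet below sketches how the aggregated metrics for this run could be read back from the details repository. This is a minimal example, assuming the `datasets` library is installed and the repository is publicly readable; the `results` config and the `latest` split are the names declared in the repo configuration, but the exact column layout of the results parquet file may differ from what the printout suggests.

```python
from datasets import load_dataset

# Aggregated metrics live in the "results" config; the "latest" split always
# points to the most recent evaluation run (here 2024-01-04T14:25:58.424291).
results = load_dataset(
    "open-llm-leaderboard/details_EmbeddedLLM__Mistral-7B-Merge-14-v0.4",
    "results",
    split="latest",
)

# Inspect the available columns before relying on any particular field name.
print(results.column_names)
print(results[0])
```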
open-llm-leaderboard/details_EmbeddedLLM__Mistral-7B-Merge-14-v0.4
[ "region:us" ]
2024-01-04T14:28:16+00:00
{"pretty_name": "Evaluation run of EmbeddedLLM/Mistral-7B-Merge-14-v0.4", "dataset_summary": "Dataset automatically created during the evaluation run of model [EmbeddedLLM/Mistral-7B-Merge-14-v0.4](https://huggingface.co/EmbeddedLLM/Mistral-7B-Merge-14-v0.4) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_EmbeddedLLM__Mistral-7B-Merge-14-v0.4\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T14:25:58.424291](https://huggingface.co/datasets/open-llm-leaderboard/details_EmbeddedLLM__Mistral-7B-Merge-14-v0.4/blob/main/results_2024-01-04T14-25-58.424291.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6546862329259969,\n \"acc_stderr\": 0.031867948580950975,\n \"acc_norm\": 0.6548303834645314,\n \"acc_norm_stderr\": 0.0325206187153387,\n \"mc1\": 0.40024479804161567,\n \"mc1_stderr\": 0.017151605555749138,\n \"mc2\": 0.5824837274596946,\n \"mc2_stderr\": 0.015539719241734074\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6390784982935154,\n \"acc_stderr\": 0.014034761386175456,\n \"acc_norm\": 0.6680887372013652,\n \"acc_norm_stderr\": 0.013760988200880538\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6829316869149572,\n \"acc_stderr\": 0.0046438327428766435,\n \"acc_norm\": 0.8614817765385382,\n \"acc_norm_stderr\": 0.003447370972192067\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.29,\n \"acc_stderr\": 0.045604802157206845,\n \"acc_norm\": 0.29,\n \"acc_norm_stderr\": 0.045604802157206845\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6222222222222222,\n \"acc_stderr\": 0.04188307537595852,\n \"acc_norm\": 0.6222222222222222,\n \"acc_norm_stderr\": 0.04188307537595852\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7105263157894737,\n \"acc_stderr\": 0.03690677986137283,\n \"acc_norm\": 0.7105263157894737,\n \"acc_norm_stderr\": 0.03690677986137283\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.62,\n \"acc_stderr\": 0.048783173121456316,\n \"acc_norm\": 0.62,\n \"acc_norm_stderr\": 0.048783173121456316\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.7132075471698113,\n \"acc_stderr\": 0.027834912527544064,\n \"acc_norm\": 0.7132075471698113,\n \"acc_norm_stderr\": 0.027834912527544064\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.03476590104304134,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.03476590104304134\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.47,\n \"acc_stderr\": 0.050161355804659205,\n \"acc_norm\": 0.47,\n \"acc_norm_stderr\": 0.050161355804659205\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.55,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.55,\n \"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6820809248554913,\n \"acc_stderr\": 0.0355068398916558,\n \"acc_norm\": 0.6820809248554913,\n \"acc_norm_stderr\": 0.0355068398916558\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4215686274509804,\n \"acc_stderr\": 0.049135952012744975,\n \"acc_norm\": 0.4215686274509804,\n \"acc_norm_stderr\": 0.049135952012744975\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.04229525846816505,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.04229525846816505\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5659574468085107,\n \"acc_stderr\": 0.03240038086792747,\n \"acc_norm\": 0.5659574468085107,\n \"acc_norm_stderr\": 0.03240038086792747\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5087719298245614,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.5087719298245614,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5517241379310345,\n \"acc_stderr\": 0.04144311810878152,\n \"acc_norm\": 0.5517241379310345,\n \"acc_norm_stderr\": 0.04144311810878152\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.40476190476190477,\n \"acc_stderr\": 0.025279850397404904,\n \"acc_norm\": 0.40476190476190477,\n \"acc_norm_stderr\": 0.025279850397404904\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.47619047619047616,\n \"acc_stderr\": 0.04467062628403273,\n \"acc_norm\": 0.47619047619047616,\n \"acc_norm_stderr\": 0.04467062628403273\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7935483870967742,\n \"acc_stderr\": 0.023025899617188723,\n \"acc_norm\": 0.7935483870967742,\n \"acc_norm_stderr\": 0.023025899617188723\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.4975369458128079,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.4975369458128079,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542129,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542129\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7696969696969697,\n \"acc_stderr\": 0.0328766675860349,\n \"acc_norm\": 0.7696969696969697,\n \"acc_norm_stderr\": 0.0328766675860349\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7878787878787878,\n \"acc_stderr\": 0.029126522834586808,\n \"acc_norm\": 0.7878787878787878,\n \"acc_norm_stderr\": 0.029126522834586808\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.021500249576033484,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.021500249576033484\n 
},\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6641025641025641,\n \"acc_stderr\": 0.023946724741563976,\n \"acc_norm\": 0.6641025641025641,\n \"acc_norm_stderr\": 0.023946724741563976\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32592592592592595,\n \"acc_stderr\": 0.02857834836547308,\n \"acc_norm\": 0.32592592592592595,\n \"acc_norm_stderr\": 0.02857834836547308\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6932773109243697,\n \"acc_stderr\": 0.029953823891887037,\n \"acc_norm\": 0.6932773109243697,\n \"acc_norm_stderr\": 0.029953823891887037\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.3443708609271523,\n \"acc_stderr\": 0.038796870240733264,\n \"acc_norm\": 0.3443708609271523,\n \"acc_norm_stderr\": 0.038796870240733264\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8440366972477065,\n \"acc_stderr\": 0.015555802713590175,\n \"acc_norm\": 0.8440366972477065,\n \"acc_norm_stderr\": 0.015555802713590175\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5138888888888888,\n \"acc_stderr\": 0.03408655867977749,\n \"acc_norm\": 0.5138888888888888,\n \"acc_norm_stderr\": 0.03408655867977749\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8382352941176471,\n \"acc_stderr\": 0.025845017986926917,\n \"acc_norm\": 0.8382352941176471,\n \"acc_norm_stderr\": 0.025845017986926917\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8227848101265823,\n \"acc_stderr\": 0.024856364184503228,\n \"acc_norm\": 0.8227848101265823,\n \"acc_norm_stderr\": 0.024856364184503228\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.695067264573991,\n \"acc_stderr\": 0.030898610882477515,\n \"acc_norm\": 0.695067264573991,\n \"acc_norm_stderr\": 0.030898610882477515\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7938931297709924,\n \"acc_stderr\": 0.03547771004159465,\n \"acc_norm\": 0.7938931297709924,\n \"acc_norm_stderr\": 0.03547771004159465\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8099173553719008,\n \"acc_stderr\": 0.03581796951709282,\n \"acc_norm\": 0.8099173553719008,\n \"acc_norm_stderr\": 0.03581796951709282\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8055555555555556,\n \"acc_stderr\": 0.038260763248848646,\n \"acc_norm\": 0.8055555555555556,\n \"acc_norm_stderr\": 0.038260763248848646\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7975460122699386,\n \"acc_stderr\": 0.031570650789119005,\n \"acc_norm\": 0.7975460122699386,\n \"acc_norm_stderr\": 0.031570650789119005\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4642857142857143,\n \"acc_stderr\": 0.04733667890053756,\n \"acc_norm\": 0.4642857142857143,\n \"acc_norm_stderr\": 0.04733667890053756\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8349514563106796,\n \"acc_stderr\": 0.036756688322331886,\n \"acc_norm\": 0.8349514563106796,\n \"acc_norm_stderr\": 0.036756688322331886\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8504273504273504,\n \"acc_stderr\": 0.023365051491753715,\n \"acc_norm\": 0.8504273504273504,\n \"acc_norm_stderr\": 0.023365051491753715\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n 
\"acc\": 0.8339719029374202,\n \"acc_stderr\": 0.0133064782430663,\n \"acc_norm\": 0.8339719029374202,\n \"acc_norm_stderr\": 0.0133064782430663\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7514450867052023,\n \"acc_stderr\": 0.023267528432100174,\n \"acc_norm\": 0.7514450867052023,\n \"acc_norm_stderr\": 0.023267528432100174\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.394413407821229,\n \"acc_stderr\": 0.01634538676210397,\n \"acc_norm\": 0.394413407821229,\n \"acc_norm_stderr\": 0.01634538676210397\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7287581699346405,\n \"acc_stderr\": 0.02545775669666788,\n \"acc_norm\": 0.7287581699346405,\n \"acc_norm_stderr\": 0.02545775669666788\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7266881028938906,\n \"acc_stderr\": 0.02531176597542612,\n \"acc_norm\": 0.7266881028938906,\n \"acc_norm_stderr\": 0.02531176597542612\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7253086419753086,\n \"acc_stderr\": 0.024836057868294677,\n \"acc_norm\": 0.7253086419753086,\n \"acc_norm_stderr\": 0.024836057868294677\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4858156028368794,\n \"acc_stderr\": 0.02981549448368206,\n \"acc_norm\": 0.4858156028368794,\n \"acc_norm_stderr\": 0.02981549448368206\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.47327249022164275,\n \"acc_stderr\": 0.012751977967676008,\n \"acc_norm\": 0.47327249022164275,\n \"acc_norm_stderr\": 0.012751977967676008\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7022058823529411,\n \"acc_stderr\": 0.02777829870154544,\n \"acc_norm\": 0.7022058823529411,\n \"acc_norm_stderr\": 0.02777829870154544\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6699346405228758,\n \"acc_stderr\": 0.019023726160724553,\n \"acc_norm\": 0.6699346405228758,\n \"acc_norm_stderr\": 0.019023726160724553\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6727272727272727,\n \"acc_stderr\": 0.0449429086625209,\n \"acc_norm\": 0.6727272727272727,\n \"acc_norm_stderr\": 0.0449429086625209\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7306122448979592,\n \"acc_stderr\": 0.02840125202902294,\n \"acc_norm\": 0.7306122448979592,\n \"acc_norm_stderr\": 0.02840125202902294\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.8407960199004975,\n \"acc_stderr\": 0.02587064676616913,\n \"acc_norm\": 0.8407960199004975,\n \"acc_norm_stderr\": 0.02587064676616913\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.86,\n \"acc_stderr\": 0.0348735088019777,\n \"acc_norm\": 0.86,\n \"acc_norm_stderr\": 0.0348735088019777\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5240963855421686,\n \"acc_stderr\": 0.03887971849597264,\n \"acc_norm\": 0.5240963855421686,\n \"acc_norm_stderr\": 0.03887971849597264\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.02796678585916089,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.02796678585916089\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.40024479804161567,\n \"mc1_stderr\": 0.017151605555749138,\n \"mc2\": 0.5824837274596946,\n \"mc2_stderr\": 0.015539719241734074\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8003157063930545,\n \"acc_stderr\": 0.011235328382625849\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.7081122062168309,\n \"acc_stderr\": 0.012522795894420869\n 
}\n}\n```", "repo_url": "https://huggingface.co/EmbeddedLLM/Mistral-7B-Merge-14-v0.4", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-25-58.424291.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-25-58.424291.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-25-58.424291.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-25-58.424291.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-25-58.424291.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T14_25_58.424291", "path": ["**/details_harness|winogrande|5_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T14-25-58.424291.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T14_25_58.424291", "path": ["results_2024-01-04T14-25-58.424291.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T14-25-58.424291.parquet"]}]}]}
2024-01-04T14:28:39+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of EmbeddedLLM/Mistral-7B-Merge-14-v0.4 Dataset automatically created during the evaluation run of model EmbeddedLLM/Mistral-7B-Merge-14-v0.4 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T14:25:58.424291 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
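The loading example referenced by "To load the details from a run, you can for instance do the following" is the snippet given in the repository metadata; it is reproduced here as a minimal sketch, assuming the `datasets` library is installed (per-task details are exposed as one config per task, e.g. `harness_winogrande_5` for the 5-shot Winogrande run):

```python
from datasets import load_dataset

# Load the per-task details for one evaluated task; the "train" split points
# to the latest results for that task.
data = load_dataset(
    "open-llm-leaderboard/details_EmbeddedLLM__Mistral-7B-Merge-14-v0.4",
    "harness_winogrande_5",
    split="train",
)
print(data)
```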
[ "# Dataset Card for Evaluation run of EmbeddedLLM/Mistral-7B-Merge-14-v0.4\n\n\n\nDataset automatically created during the evaluation run of model EmbeddedLLM/Mistral-7B-Merge-14-v0.4 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:25:58.424291(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of EmbeddedLLM/Mistral-7B-Merge-14-v0.4\n\n\n\nDataset automatically created during the evaluation run of model EmbeddedLLM/Mistral-7B-Merge-14-v0.4 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:25:58.424291(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of EmbeddedLLM/Mistral-7B-Merge-14-v0.4\n\n\n\nDataset automatically created during the evaluation run of model EmbeddedLLM/Mistral-7B-Merge-14-v0.4 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T14:25:58.424291(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
2ad291dd147ebbd0824767c331e1a457f566ddef
# Dataset Card for Evaluation run of KnutJaegersberg/Qwen-1_8B-Llamafied <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [KnutJaegersberg/Qwen-1_8B-Llamafied](https://huggingface.co/KnutJaegersberg/Qwen-1_8B-Llamafied) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_KnutJaegersberg__Qwen-1_8B-Llamafied", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T14:29:46.400920](https://huggingface.co/datasets/open-llm-leaderboard/details_KnutJaegersberg__Qwen-1_8B-Llamafied/blob/main/results_2024-01-04T14-29-46.400920.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.46036469687606746, "acc_stderr": 0.03481465754956138, "acc_norm": 0.4643902964835488, "acc_norm_stderr": 0.03556658647639108, "mc1": 0.23745410036719705, "mc1_stderr": 0.014896277441041836, "mc2": 0.39408163994148815, "mc2_stderr": 0.014247164817369498 }, "harness|arc:challenge|25": { "acc": 0.3447098976109215, "acc_stderr": 0.01388881628678211, "acc_norm": 0.3771331058020478, "acc_norm_stderr": 0.01416336689619259 }, "harness|hellaswag|10": { "acc": 0.44303923521210914, "acc_stderr": 0.004957296691391577, "acc_norm": 0.5887273451503684, "acc_norm_stderr": 0.004910588449330011 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.28, "acc_stderr": 0.045126085985421296, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421296 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.43703703703703706, "acc_stderr": 0.04284958639753399, "acc_norm": 0.43703703703703706, "acc_norm_stderr": 0.04284958639753399 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.4868421052631579, "acc_stderr": 0.04067533136309173, "acc_norm": 0.4868421052631579, "acc_norm_stderr": 0.04067533136309173 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.51, "acc_stderr": 0.05024183937956912, "acc_norm": 0.51, "acc_norm_stderr": 0.05024183937956912 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.5283018867924528, "acc_stderr": 0.030723535249006107, "acc_norm": 0.5283018867924528, "acc_norm_stderr": 0.030723535249006107 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.4166666666666667, "acc_stderr": 0.041227287076512825, "acc_norm": 0.4166666666666667, "acc_norm_stderr": 0.041227287076512825 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.45, "acc_stderr": 0.049999999999999996, "acc_norm": 
0.45, "acc_norm_stderr": 0.049999999999999996 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.45664739884393063, "acc_stderr": 0.037981065660144996, "acc_norm": 0.45664739884393063, "acc_norm_stderr": 0.037981065660144996 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3235294117647059, "acc_stderr": 0.046550104113196177, "acc_norm": 0.3235294117647059, "acc_norm_stderr": 0.046550104113196177 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.52, "acc_stderr": 0.050211673156867795, "acc_norm": 0.52, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.39574468085106385, "acc_stderr": 0.031967586978353627, "acc_norm": 0.39574468085106385, "acc_norm_stderr": 0.031967586978353627 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.34210526315789475, "acc_stderr": 0.04462917535336936, "acc_norm": 0.34210526315789475, "acc_norm_stderr": 0.04462917535336936 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.496551724137931, "acc_stderr": 0.041665675771015785, "acc_norm": 0.496551724137931, "acc_norm_stderr": 0.041665675771015785 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.32275132275132273, "acc_stderr": 0.024078943243597016, "acc_norm": 0.32275132275132273, "acc_norm_stderr": 0.024078943243597016 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.30158730158730157, "acc_stderr": 0.04104947269903394, "acc_norm": 0.30158730158730157, "acc_norm_stderr": 0.04104947269903394 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.5451612903225806, "acc_stderr": 0.028327743091561063, "acc_norm": 0.5451612903225806, "acc_norm_stderr": 0.028327743091561063 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.3891625615763547, "acc_stderr": 0.03430462416103872, "acc_norm": 0.3891625615763547, "acc_norm_stderr": 0.03430462416103872 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.4, "acc_stderr": 0.049236596391733084, "acc_norm": 0.4, "acc_norm_stderr": 0.049236596391733084 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.593939393939394, "acc_stderr": 0.03834816355401181, "acc_norm": 0.593939393939394, "acc_norm_stderr": 0.03834816355401181 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.5606060606060606, "acc_stderr": 0.035360859475294805, "acc_norm": 0.5606060606060606, "acc_norm_stderr": 0.035360859475294805 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.5958549222797928, "acc_stderr": 0.0354150857888402, "acc_norm": 0.5958549222797928, "acc_norm_stderr": 0.0354150857888402 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.4282051282051282, "acc_stderr": 0.02508830145469484, "acc_norm": 0.4282051282051282, "acc_norm_stderr": 0.02508830145469484 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3148148148148148, "acc_stderr": 0.02831753349606648, "acc_norm": 0.3148148148148148, "acc_norm_stderr": 0.02831753349606648 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.453781512605042, "acc_stderr": 0.03233943468182088, "acc_norm": 0.453781512605042, "acc_norm_stderr": 0.03233943468182088 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.33112582781456956, "acc_stderr": 0.038425817186598696, "acc_norm": 0.33112582781456956, "acc_norm_stderr": 0.038425817186598696 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.5688073394495413, "acc_stderr": 0.02123336503031956, "acc_norm": 0.5688073394495413, "acc_norm_stderr": 0.02123336503031956 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.35648148148148145, "acc_stderr": 0.032664783315272714, "acc_norm": 0.35648148148148145, "acc_norm_stderr": 0.032664783315272714 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.5196078431372549, "acc_stderr": 0.03506612560524866, "acc_norm": 0.5196078431372549, "acc_norm_stderr": 0.03506612560524866 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.5949367088607594, "acc_stderr": 0.03195514741370671, "acc_norm": 0.5949367088607594, "acc_norm_stderr": 0.03195514741370671 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.45739910313901344, "acc_stderr": 0.033435777055830646, "acc_norm": 0.45739910313901344, "acc_norm_stderr": 0.033435777055830646 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.5343511450381679, "acc_stderr": 0.04374928560599738, "acc_norm": 0.5343511450381679, "acc_norm_stderr": 0.04374928560599738 }, "harness|hendrycksTest-international_law|5": { "acc": 0.6363636363636364, "acc_stderr": 0.04391326286724071, "acc_norm": 0.6363636363636364, "acc_norm_stderr": 0.04391326286724071 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.5555555555555556, "acc_stderr": 0.04803752235190193, "acc_norm": 0.5555555555555556, "acc_norm_stderr": 0.04803752235190193 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.4294478527607362, "acc_stderr": 0.038890666191127216, "acc_norm": 0.4294478527607362, "acc_norm_stderr": 0.038890666191127216 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.33035714285714285, "acc_stderr": 0.04464285714285713, "acc_norm": 0.33035714285714285, "acc_norm_stderr": 0.04464285714285713 }, "harness|hendrycksTest-management|5": { "acc": 0.6796116504854369, "acc_stderr": 0.04620284082280041, "acc_norm": 0.6796116504854369, "acc_norm_stderr": 0.04620284082280041 }, "harness|hendrycksTest-marketing|5": { "acc": 0.7264957264957265, "acc_stderr": 0.029202540153431173, "acc_norm": 0.7264957264957265, "acc_norm_stderr": 0.029202540153431173 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.49, "acc_stderr": 0.05024183937956911, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956911 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.5862068965517241, "acc_stderr": 0.017612204084663765, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.017612204084663765 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.5144508670520231, "acc_stderr": 0.026907849856282542, "acc_norm": 0.5144508670520231, "acc_norm_stderr": 0.026907849856282542 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.24022346368715083, "acc_stderr": 0.014288343803925295, "acc_norm": 0.24022346368715083, "acc_norm_stderr": 0.014288343803925295 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.5816993464052288, "acc_stderr": 0.02824513402438729, "acc_norm": 0.5816993464052288, "acc_norm_stderr": 0.02824513402438729 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.4983922829581994, "acc_stderr": 0.02839794490780661, "acc_norm": 0.4983922829581994, "acc_norm_stderr": 0.02839794490780661 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.4691358024691358, "acc_stderr": 0.02776768960683392, "acc_norm": 0.4691358024691358, "acc_norm_stderr": 
0.02776768960683392 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.34397163120567376, "acc_stderr": 0.028338017428611313, "acc_norm": 0.34397163120567376, "acc_norm_stderr": 0.028338017428611313 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.34159061277705344, "acc_stderr": 0.012112391320842858, "acc_norm": 0.34159061277705344, "acc_norm_stderr": 0.012112391320842858 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.4007352941176471, "acc_stderr": 0.02976826352893311, "acc_norm": 0.4007352941176471, "acc_norm_stderr": 0.02976826352893311 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.3790849673202614, "acc_stderr": 0.019627444748412236, "acc_norm": 0.3790849673202614, "acc_norm_stderr": 0.019627444748412236 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6, "acc_stderr": 0.0469237132203465, "acc_norm": 0.6, "acc_norm_stderr": 0.0469237132203465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.49795918367346936, "acc_stderr": 0.0320089533497105, "acc_norm": 0.49795918367346936, "acc_norm_stderr": 0.0320089533497105 }, "harness|hendrycksTest-sociology|5": { "acc": 0.6169154228855721, "acc_stderr": 0.034375193373382504, "acc_norm": 0.6169154228855721, "acc_norm_stderr": 0.034375193373382504 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.59, "acc_stderr": 0.04943110704237102, "acc_norm": 0.59, "acc_norm_stderr": 0.04943110704237102 }, "harness|hendrycksTest-virology|5": { "acc": 0.39156626506024095, "acc_stderr": 0.03799857454479637, "acc_norm": 0.39156626506024095, "acc_norm_stderr": 0.03799857454479637 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.5614035087719298, "acc_stderr": 0.038057975055904594, "acc_norm": 0.5614035087719298, "acc_norm_stderr": 0.038057975055904594 }, "harness|truthfulqa:mc|0": { "mc1": 0.23745410036719705, "mc1_stderr": 0.014896277441041836, "mc2": 0.39408163994148815, "mc2_stderr": 0.014247164817369498 }, "harness|winogrande|5": { "acc": 0.6172059984214681, "acc_stderr": 0.013660946109442006 }, "harness|gsm8k|5": { "acc": 0.2441243366186505, "acc_stderr": 0.011832404674077594 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. 
news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
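Building on the loading snippet in the card above, here is a minimal usage sketch for pulling the aggregated "results" configuration and one per-task configuration of this record. It assumes only the `datasets` library and the configuration/split names that appear in this record's metadata ("results", "harness_gsm8k_5", and the "latest" split); the actual column names are whatever the parquet files contain, so the sketch inspects the schema rather than assuming specific fields.

```python
from datasets import load_dataset

# Aggregated metrics for the most recent run of this model.
# "results" is one of the configurations listed in the metadata below,
# and "latest" is the split that tracks the newest timestamped run.
results = load_dataset(
    "open-llm-leaderboard/details_KnutJaegersberg__Qwen-1_8B-Llamafied",
    "results",
    split="latest",
)
print(results)               # row count and feature names
print(results.column_names)  # inspect the schema before relying on any field

# Per-task details work the same way, e.g. the 5-shot GSM8K predictions.
gsm8k = load_dataset(
    "open-llm-leaderboard/details_KnutJaegersberg__Qwen-1_8B-Llamafied",
    "harness_gsm8k_5",
    split="latest",
)
print(gsm8k[0].keys())       # fields of a single evaluated example
```

Each configuration maps to its own parquet files (see the "data_files" patterns in the metadata below), so loading a single configuration does not require fetching the whole repository.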
open-llm-leaderboard/details_KnutJaegersberg__Qwen-1_8B-Llamafied
[ "region:us" ]
2024-01-04T14:31:53+00:00
{"pretty_name": "Evaluation run of KnutJaegersberg/Qwen-1_8B-Llamafied", "dataset_summary": "Dataset automatically created during the evaluation run of model [KnutJaegersberg/Qwen-1_8B-Llamafied](https://huggingface.co/KnutJaegersberg/Qwen-1_8B-Llamafied) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_KnutJaegersberg__Qwen-1_8B-Llamafied\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T14:29:46.400920](https://huggingface.co/datasets/open-llm-leaderboard/details_KnutJaegersberg__Qwen-1_8B-Llamafied/blob/main/results_2024-01-04T14-29-46.400920.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.46036469687606746,\n \"acc_stderr\": 0.03481465754956138,\n \"acc_norm\": 0.4643902964835488,\n \"acc_norm_stderr\": 0.03556658647639108,\n \"mc1\": 0.23745410036719705,\n \"mc1_stderr\": 0.014896277441041836,\n \"mc2\": 0.39408163994148815,\n \"mc2_stderr\": 0.014247164817369498\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.3447098976109215,\n \"acc_stderr\": 0.01388881628678211,\n \"acc_norm\": 0.3771331058020478,\n \"acc_norm_stderr\": 0.01416336689619259\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.44303923521210914,\n \"acc_stderr\": 0.004957296691391577,\n \"acc_norm\": 0.5887273451503684,\n \"acc_norm_stderr\": 0.004910588449330011\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421296,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421296\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.43703703703703706,\n \"acc_stderr\": 0.04284958639753399,\n \"acc_norm\": 0.43703703703703706,\n \"acc_norm_stderr\": 0.04284958639753399\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.4868421052631579,\n \"acc_stderr\": 0.04067533136309173,\n \"acc_norm\": 0.4868421052631579,\n \"acc_norm_stderr\": 0.04067533136309173\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.51,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.51,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.5283018867924528,\n \"acc_stderr\": 0.030723535249006107,\n \"acc_norm\": 0.5283018867924528,\n \"acc_norm_stderr\": 0.030723535249006107\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.4166666666666667,\n \"acc_stderr\": 0.041227287076512825,\n \"acc_norm\": 0.4166666666666667,\n \"acc_norm_stderr\": 0.041227287076512825\n },\n 
\"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.049999999999999996,\n \"acc_norm\": 0.45,\n \"acc_norm_stderr\": 0.049999999999999996\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.45664739884393063,\n \"acc_stderr\": 0.037981065660144996,\n \"acc_norm\": 0.45664739884393063,\n \"acc_norm_stderr\": 0.037981065660144996\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3235294117647059,\n \"acc_stderr\": 0.046550104113196177,\n \"acc_norm\": 0.3235294117647059,\n \"acc_norm_stderr\": 0.046550104113196177\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.52,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.52,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.39574468085106385,\n \"acc_stderr\": 0.031967586978353627,\n \"acc_norm\": 0.39574468085106385,\n \"acc_norm_stderr\": 0.031967586978353627\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.34210526315789475,\n \"acc_stderr\": 0.04462917535336936,\n \"acc_norm\": 0.34210526315789475,\n \"acc_norm_stderr\": 0.04462917535336936\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.496551724137931,\n \"acc_stderr\": 0.041665675771015785,\n \"acc_norm\": 0.496551724137931,\n \"acc_norm_stderr\": 0.041665675771015785\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.32275132275132273,\n \"acc_stderr\": 0.024078943243597016,\n \"acc_norm\": 0.32275132275132273,\n \"acc_norm_stderr\": 0.024078943243597016\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.30158730158730157,\n \"acc_stderr\": 0.04104947269903394,\n \"acc_norm\": 0.30158730158730157,\n \"acc_norm_stderr\": 0.04104947269903394\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.5451612903225806,\n \"acc_stderr\": 0.028327743091561063,\n \"acc_norm\": 0.5451612903225806,\n \"acc_norm_stderr\": 0.028327743091561063\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.3891625615763547,\n \"acc_stderr\": 0.03430462416103872,\n \"acc_norm\": 0.3891625615763547,\n \"acc_norm_stderr\": 0.03430462416103872\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.049236596391733084,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.049236596391733084\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.593939393939394,\n \"acc_stderr\": 0.03834816355401181,\n \"acc_norm\": 0.593939393939394,\n \"acc_norm_stderr\": 0.03834816355401181\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.5606060606060606,\n \"acc_stderr\": 0.035360859475294805,\n \"acc_norm\": 0.5606060606060606,\n \"acc_norm_stderr\": 0.035360859475294805\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.5958549222797928,\n \"acc_stderr\": 0.0354150857888402,\n \"acc_norm\": 0.5958549222797928,\n 
\"acc_norm_stderr\": 0.0354150857888402\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.4282051282051282,\n \"acc_stderr\": 0.02508830145469484,\n \"acc_norm\": 0.4282051282051282,\n \"acc_norm_stderr\": 0.02508830145469484\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3148148148148148,\n \"acc_stderr\": 0.02831753349606648,\n \"acc_norm\": 0.3148148148148148,\n \"acc_norm_stderr\": 0.02831753349606648\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.453781512605042,\n \"acc_stderr\": 0.03233943468182088,\n \"acc_norm\": 0.453781512605042,\n \"acc_norm_stderr\": 0.03233943468182088\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.33112582781456956,\n \"acc_stderr\": 0.038425817186598696,\n \"acc_norm\": 0.33112582781456956,\n \"acc_norm_stderr\": 0.038425817186598696\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.5688073394495413,\n \"acc_stderr\": 0.02123336503031956,\n \"acc_norm\": 0.5688073394495413,\n \"acc_norm_stderr\": 0.02123336503031956\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.35648148148148145,\n \"acc_stderr\": 0.032664783315272714,\n \"acc_norm\": 0.35648148148148145,\n \"acc_norm_stderr\": 0.032664783315272714\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.5196078431372549,\n \"acc_stderr\": 0.03506612560524866,\n \"acc_norm\": 0.5196078431372549,\n \"acc_norm_stderr\": 0.03506612560524866\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.5949367088607594,\n \"acc_stderr\": 0.03195514741370671,\n \"acc_norm\": 0.5949367088607594,\n \"acc_norm_stderr\": 0.03195514741370671\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.45739910313901344,\n \"acc_stderr\": 0.033435777055830646,\n \"acc_norm\": 0.45739910313901344,\n \"acc_norm_stderr\": 0.033435777055830646\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.5343511450381679,\n \"acc_stderr\": 0.04374928560599738,\n \"acc_norm\": 0.5343511450381679,\n \"acc_norm_stderr\": 0.04374928560599738\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.6363636363636364,\n \"acc_stderr\": 0.04391326286724071,\n \"acc_norm\": 0.6363636363636364,\n \"acc_norm_stderr\": 0.04391326286724071\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.5555555555555556,\n \"acc_stderr\": 0.04803752235190193,\n \"acc_norm\": 0.5555555555555556,\n \"acc_norm_stderr\": 0.04803752235190193\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.4294478527607362,\n \"acc_stderr\": 0.038890666191127216,\n \"acc_norm\": 0.4294478527607362,\n \"acc_norm_stderr\": 0.038890666191127216\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.33035714285714285,\n \"acc_stderr\": 0.04464285714285713,\n \"acc_norm\": 0.33035714285714285,\n \"acc_norm_stderr\": 0.04464285714285713\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.6796116504854369,\n \"acc_stderr\": 0.04620284082280041,\n \"acc_norm\": 0.6796116504854369,\n \"acc_norm_stderr\": 0.04620284082280041\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.7264957264957265,\n \"acc_stderr\": 0.029202540153431173,\n \"acc_norm\": 0.7264957264957265,\n \"acc_norm_stderr\": 0.029202540153431173\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956911,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956911\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.017612204084663765,\n \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.017612204084663765\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.5144508670520231,\n \"acc_stderr\": 0.026907849856282542,\n \"acc_norm\": 0.5144508670520231,\n \"acc_norm_stderr\": 0.026907849856282542\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.24022346368715083,\n \"acc_stderr\": 0.014288343803925295,\n \"acc_norm\": 0.24022346368715083,\n \"acc_norm_stderr\": 0.014288343803925295\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.5816993464052288,\n \"acc_stderr\": 0.02824513402438729,\n \"acc_norm\": 0.5816993464052288,\n \"acc_norm_stderr\": 0.02824513402438729\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.4983922829581994,\n \"acc_stderr\": 0.02839794490780661,\n \"acc_norm\": 0.4983922829581994,\n \"acc_norm_stderr\": 0.02839794490780661\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.4691358024691358,\n \"acc_stderr\": 0.02776768960683392,\n \"acc_norm\": 0.4691358024691358,\n \"acc_norm_stderr\": 0.02776768960683392\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.34397163120567376,\n \"acc_stderr\": 0.028338017428611313,\n \"acc_norm\": 0.34397163120567376,\n \"acc_norm_stderr\": 0.028338017428611313\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.34159061277705344,\n \"acc_stderr\": 0.012112391320842858,\n \"acc_norm\": 0.34159061277705344,\n \"acc_norm_stderr\": 0.012112391320842858\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.4007352941176471,\n \"acc_stderr\": 0.02976826352893311,\n \"acc_norm\": 0.4007352941176471,\n \"acc_norm_stderr\": 0.02976826352893311\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.3790849673202614,\n \"acc_stderr\": 0.019627444748412236,\n \"acc_norm\": 0.3790849673202614,\n \"acc_norm_stderr\": 0.019627444748412236\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.49795918367346936,\n \"acc_stderr\": 0.0320089533497105,\n \"acc_norm\": 0.49795918367346936,\n \"acc_norm_stderr\": 0.0320089533497105\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.6169154228855721,\n \"acc_stderr\": 0.034375193373382504,\n \"acc_norm\": 0.6169154228855721,\n \"acc_norm_stderr\": 0.034375193373382504\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.59,\n \"acc_stderr\": 0.04943110704237102,\n \"acc_norm\": 0.59,\n \"acc_norm_stderr\": 0.04943110704237102\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.39156626506024095,\n \"acc_stderr\": 0.03799857454479637,\n \"acc_norm\": 0.39156626506024095,\n \"acc_norm_stderr\": 0.03799857454479637\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.5614035087719298,\n \"acc_stderr\": 0.038057975055904594,\n \"acc_norm\": 0.5614035087719298,\n \"acc_norm_stderr\": 0.038057975055904594\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.23745410036719705,\n \"mc1_stderr\": 0.014896277441041836,\n \"mc2\": 0.39408163994148815,\n \"mc2_stderr\": 0.014247164817369498\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.6172059984214681,\n \"acc_stderr\": 0.013660946109442006\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.2441243366186505,\n 
\"acc_stderr\": 0.011832404674077594\n }\n}\n```", "repo_url": "https://huggingface.co/KnutJaegersberg/Qwen-1_8B-Llamafied", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-29-46.400920.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-29-46.400920.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-29-46.400920.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-29-46.400920.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-29-46.400920.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T14_29_46.400920", "path": ["**/details_harness|winogrande|5_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T14-29-46.400920.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T14_29_46.400920", "path": ["results_2024-01-04T14-29-46.400920.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T14-29-46.400920.parquet"]}]}]}
2024-01-04T14:32:16+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of KnutJaegersberg/Qwen-1_8B-Llamafied Dataset automatically created during the evaluation run of model KnutJaegersberg/Qwen-1_8B-Llamafied on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T14:29:46.400920 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
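The loading instruction in the card text above lost its code snippet in this flattened form; a minimal sketch of what it refers to, assuming the repository id follows the leaderboard's `details_<org>__<model>` naming convention and using the `harness_winogrande_5` configuration listed in this run's file metadata:

```python
from datasets import load_dataset

# Assumed repository id, following the leaderboard's details_<org>__<model> pattern.
data = load_dataset(
    "open-llm-leaderboard/details_KnutJaegersberg__Qwen-1_8B-Llamafied",
    "harness_winogrande_5",  # any of the 63 configurations listed in the metadata works here
    split="train",           # the "train" split always points to the latest results
)
```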
[ "# Dataset Card for Evaluation run of KnutJaegersberg/Qwen-1_8B-Llamafied\n\n\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/Qwen-1_8B-Llamafied on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:29:46.400920(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of KnutJaegersberg/Qwen-1_8B-Llamafied\n\n\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/Qwen-1_8B-Llamafied on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:29:46.400920(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 195, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of KnutJaegersberg/Qwen-1_8B-Llamafied\n\n\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/Qwen-1_8B-Llamafied on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T14:29:46.400920(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]" ]
e881e7f43f8f8e89d6cb35b9b01a35988829daff
# Dataset Card for Evaluation run of gagan3012/MetaModelv2 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [gagan3012/MetaModelv2](https://huggingface.co/gagan3012/MetaModelv2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_gagan3012__MetaModelv2", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T14:41:37.767786](https://huggingface.co/datasets/open-llm-leaderboard/details_gagan3012__MetaModelv2/blob/main/results_2024-01-04T14-41-37.767786.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6665361592524657, "acc_stderr": 0.03162222867188688, "acc_norm": 0.6674781511292511, "acc_norm_stderr": 0.03226398387608882, "mc1": 0.5691554467564259, "mc1_stderr": 0.01733527247533237, "mc2": 0.7193907636520086, "mc2_stderr": 0.014961127066631282 }, "harness|arc:challenge|25": { "acc": 0.6843003412969283, "acc_stderr": 0.01358257109581529, "acc_norm": 0.7107508532423208, "acc_norm_stderr": 0.013250012579393441 }, "harness|hellaswag|10": { "acc": 0.7140011949810795, "acc_stderr": 0.00450965267939568, "acc_norm": 0.8855805616411073, "acc_norm_stderr": 0.003176694564511078 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.41, "acc_stderr": 0.049431107042371025, "acc_norm": 0.41, "acc_norm_stderr": 0.049431107042371025 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6148148148148148, "acc_stderr": 0.04203921040156279, "acc_norm": 0.6148148148148148, "acc_norm_stderr": 0.04203921040156279 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.7368421052631579, "acc_stderr": 0.03583496176361072, "acc_norm": 0.7368421052631579, "acc_norm_stderr": 0.03583496176361072 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6867924528301886, "acc_stderr": 0.028544793319055326, "acc_norm": 0.6867924528301886, "acc_norm_stderr": 0.028544793319055326 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7708333333333334, "acc_stderr": 0.03514697467862388, "acc_norm": 0.7708333333333334, "acc_norm_stderr": 0.03514697467862388 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.45, "acc_stderr": 0.05, "acc_norm": 0.45, "acc_norm_stderr": 0.05 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.34, 
"acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6705202312138728, "acc_stderr": 0.03583901754736412, "acc_norm": 0.6705202312138728, "acc_norm_stderr": 0.03583901754736412 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.39215686274509803, "acc_stderr": 0.04858083574266346, "acc_norm": 0.39215686274509803, "acc_norm_stderr": 0.04858083574266346 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.76, "acc_stderr": 0.042923469599092816, "acc_norm": 0.76, "acc_norm_stderr": 0.042923469599092816 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6212765957446809, "acc_stderr": 0.03170995606040655, "acc_norm": 0.6212765957446809, "acc_norm_stderr": 0.03170995606040655 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.5, "acc_stderr": 0.047036043419179864, "acc_norm": 0.5, "acc_norm_stderr": 0.047036043419179864 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6137931034482759, "acc_stderr": 0.04057324734419036, "acc_norm": 0.6137931034482759, "acc_norm_stderr": 0.04057324734419036 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4894179894179894, "acc_stderr": 0.025745542276045478, "acc_norm": 0.4894179894179894, "acc_norm_stderr": 0.025745542276045478 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4523809523809524, "acc_stderr": 0.044518079590553275, "acc_norm": 0.4523809523809524, "acc_norm_stderr": 0.044518079590553275 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.048241815132442176, "acc_norm": 0.36, "acc_norm_stderr": 0.048241815132442176 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.8129032258064516, "acc_stderr": 0.022185710092252252, "acc_norm": 0.8129032258064516, "acc_norm_stderr": 0.022185710092252252 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5024630541871922, "acc_stderr": 0.03517945038691063, "acc_norm": 0.5024630541871922, "acc_norm_stderr": 0.03517945038691063 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.72, "acc_stderr": 0.04512608598542128, "acc_norm": 0.72, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.8121212121212121, "acc_stderr": 0.03050193405942914, "acc_norm": 0.8121212121212121, "acc_norm_stderr": 0.03050193405942914 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8686868686868687, "acc_stderr": 0.024063156416822516, "acc_norm": 0.8686868686868687, "acc_norm_stderr": 0.024063156416822516 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.9015544041450777, "acc_stderr": 0.02150024957603347, "acc_norm": 0.9015544041450777, "acc_norm_stderr": 0.02150024957603347 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6666666666666666, "acc_stderr": 0.023901157979402538, "acc_norm": 0.6666666666666666, "acc_norm_stderr": 0.023901157979402538 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.36666666666666664, "acc_stderr": 0.029381620726465073, "acc_norm": 0.36666666666666664, "acc_norm_stderr": 0.029381620726465073 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.7100840336134454, "acc_stderr": 0.029472485833136094, "acc_norm": 0.7100840336134454, "acc_norm_stderr": 0.029472485833136094 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.36423841059602646, "acc_stderr": 0.03929111781242742, "acc_norm": 0.36423841059602646, "acc_norm_stderr": 0.03929111781242742 }, 
"harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8477064220183487, "acc_stderr": 0.015405084393157074, "acc_norm": 0.8477064220183487, "acc_norm_stderr": 0.015405084393157074 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5648148148148148, "acc_stderr": 0.03381200005643527, "acc_norm": 0.5648148148148148, "acc_norm_stderr": 0.03381200005643527 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8480392156862745, "acc_stderr": 0.0251956584289318, "acc_norm": 0.8480392156862745, "acc_norm_stderr": 0.0251956584289318 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8523206751054853, "acc_stderr": 0.0230943295825957, "acc_norm": 0.8523206751054853, "acc_norm_stderr": 0.0230943295825957 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6771300448430493, "acc_stderr": 0.03138147637575499, "acc_norm": 0.6771300448430493, "acc_norm_stderr": 0.03138147637575499 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7480916030534351, "acc_stderr": 0.03807387116306086, "acc_norm": 0.7480916030534351, "acc_norm_stderr": 0.03807387116306086 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7851239669421488, "acc_stderr": 0.03749492448709696, "acc_norm": 0.7851239669421488, "acc_norm_stderr": 0.03749492448709696 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.8148148148148148, "acc_stderr": 0.03755265865037182, "acc_norm": 0.8148148148148148, "acc_norm_stderr": 0.03755265865037182 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7607361963190185, "acc_stderr": 0.033519538795212696, "acc_norm": 0.7607361963190185, "acc_norm_stderr": 0.033519538795212696 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.8543689320388349, "acc_stderr": 0.03492606476623791, "acc_norm": 0.8543689320388349, "acc_norm_stderr": 0.03492606476623791 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.0230866350868414, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.0230866350868414 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.69, "acc_stderr": 0.04648231987117316, "acc_norm": 0.69, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8109833971902938, "acc_stderr": 0.014000791294407, "acc_norm": 0.8109833971902938, "acc_norm_stderr": 0.014000791294407 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7485549132947977, "acc_stderr": 0.023357365785874037, "acc_norm": 0.7485549132947977, "acc_norm_stderr": 0.023357365785874037 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.40558659217877097, "acc_stderr": 0.01642167050633918, "acc_norm": 0.40558659217877097, "acc_norm_stderr": 0.01642167050633918 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7549019607843137, "acc_stderr": 0.02463004897982478, "acc_norm": 0.7549019607843137, "acc_norm_stderr": 0.02463004897982478 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.7331189710610932, "acc_stderr": 0.025122637608816646, "acc_norm": 0.7331189710610932, "acc_norm_stderr": 0.025122637608816646 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7777777777777778, "acc_stderr": 0.02313237623454333, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.02313237623454333 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.4929078014184397, "acc_stderr": 0.02982449855912901, "acc_norm": 
0.4929078014184397, "acc_norm_stderr": 0.02982449855912901 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4869621903520209, "acc_stderr": 0.012765893883835332, "acc_norm": 0.4869621903520209, "acc_norm_stderr": 0.012765893883835332 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.75, "acc_stderr": 0.026303648393696036, "acc_norm": 0.75, "acc_norm_stderr": 0.026303648393696036 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6830065359477124, "acc_stderr": 0.01882421951270621, "acc_norm": 0.6830065359477124, "acc_norm_stderr": 0.01882421951270621 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6909090909090909, "acc_stderr": 0.044262946482000985, "acc_norm": 0.6909090909090909, "acc_norm_stderr": 0.044262946482000985 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7428571428571429, "acc_stderr": 0.02797982353874455, "acc_norm": 0.7428571428571429, "acc_norm_stderr": 0.02797982353874455 }, "harness|hendrycksTest-sociology|5": { "acc": 0.845771144278607, "acc_stderr": 0.025538433368578337, "acc_norm": 0.845771144278607, "acc_norm_stderr": 0.025538433368578337 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.92, "acc_stderr": 0.0272659924344291, "acc_norm": 0.92, "acc_norm_stderr": 0.0272659924344291 }, "harness|hendrycksTest-virology|5": { "acc": 0.5843373493975904, "acc_stderr": 0.03836722176598053, "acc_norm": 0.5843373493975904, "acc_norm_stderr": 0.03836722176598053 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.7719298245614035, "acc_stderr": 0.032180937956023566, "acc_norm": 0.7719298245614035, "acc_norm_stderr": 0.032180937956023566 }, "harness|truthfulqa:mc|0": { "mc1": 0.5691554467564259, "mc1_stderr": 0.01733527247533237, "mc2": 0.7193907636520086, "mc2_stderr": 0.014961127066631282 }, "harness|winogrande|5": { "acc": 0.8310970797158642, "acc_stderr": 0.010529981411838906 }, "harness|gsm8k|5": { "acc": 0.6444275966641395, "acc_stderr": 0.013185402252713852 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
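Beyond the per-configuration example shown earlier in this card, the aggregated scores it describes can be loaded from the "results" configuration; a minimal sketch, assuming the same config/split layout as the other runs in this dump (where the "latest" split resolves to results_2024-01-04T14-41-37.767786.parquet):

```python
from datasets import load_dataset

# The "results" configuration stores the aggregated metrics of the run;
# the "latest" split points at the most recent evaluation (2024-01-04T14:41:37.767786).
results = load_dataset(
    "open-llm-leaderboard/details_gagan3012__MetaModelv2",
    "results",
    split="latest",
)
```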
open-llm-leaderboard/details_gagan3012__MetaModelv2
[ "region:us" ]
2024-01-04T14:43:54+00:00
{"pretty_name": "Evaluation run of gagan3012/MetaModelv2", "dataset_summary": "Dataset automatically created during the evaluation run of model [gagan3012/MetaModelv2](https://huggingface.co/gagan3012/MetaModelv2) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_gagan3012__MetaModelv2\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T14:41:37.767786](https://huggingface.co/datasets/open-llm-leaderboard/details_gagan3012__MetaModelv2/blob/main/results_2024-01-04T14-41-37.767786.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6665361592524657,\n \"acc_stderr\": 0.03162222867188688,\n \"acc_norm\": 0.6674781511292511,\n \"acc_norm_stderr\": 0.03226398387608882,\n \"mc1\": 0.5691554467564259,\n \"mc1_stderr\": 0.01733527247533237,\n \"mc2\": 0.7193907636520086,\n \"mc2_stderr\": 0.014961127066631282\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6843003412969283,\n \"acc_stderr\": 0.01358257109581529,\n \"acc_norm\": 0.7107508532423208,\n \"acc_norm_stderr\": 0.013250012579393441\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.7140011949810795,\n \"acc_stderr\": 0.00450965267939568,\n \"acc_norm\": 0.8855805616411073,\n \"acc_norm_stderr\": 0.003176694564511078\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.41,\n \"acc_stderr\": 0.049431107042371025,\n \"acc_norm\": 0.41,\n \"acc_norm_stderr\": 0.049431107042371025\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6148148148148148,\n \"acc_stderr\": 0.04203921040156279,\n \"acc_norm\": 0.6148148148148148,\n \"acc_norm_stderr\": 0.04203921040156279\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.7368421052631579,\n \"acc_stderr\": 0.03583496176361072,\n \"acc_norm\": 0.7368421052631579,\n \"acc_norm_stderr\": 0.03583496176361072\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6867924528301886,\n \"acc_stderr\": 0.028544793319055326,\n \"acc_norm\": 0.6867924528301886,\n \"acc_norm_stderr\": 0.028544793319055326\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7708333333333334,\n \"acc_stderr\": 0.03514697467862388,\n \"acc_norm\": 0.7708333333333334,\n \"acc_norm_stderr\": 0.03514697467862388\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.45,\n \"acc_stderr\": 0.05,\n \"acc_norm\": 0.45,\n 
\"acc_norm_stderr\": 0.05\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6705202312138728,\n \"acc_stderr\": 0.03583901754736412,\n \"acc_norm\": 0.6705202312138728,\n \"acc_norm_stderr\": 0.03583901754736412\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.39215686274509803,\n \"acc_stderr\": 0.04858083574266346,\n \"acc_norm\": 0.39215686274509803,\n \"acc_norm_stderr\": 0.04858083574266346\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.76,\n \"acc_stderr\": 0.042923469599092816,\n \"acc_norm\": 0.76,\n \"acc_norm_stderr\": 0.042923469599092816\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6212765957446809,\n \"acc_stderr\": 0.03170995606040655,\n \"acc_norm\": 0.6212765957446809,\n \"acc_norm_stderr\": 0.03170995606040655\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.047036043419179864,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.047036043419179864\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6137931034482759,\n \"acc_stderr\": 0.04057324734419036,\n \"acc_norm\": 0.6137931034482759,\n \"acc_norm_stderr\": 0.04057324734419036\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4894179894179894,\n \"acc_stderr\": 0.025745542276045478,\n \"acc_norm\": 0.4894179894179894,\n \"acc_norm_stderr\": 0.025745542276045478\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4523809523809524,\n \"acc_stderr\": 0.044518079590553275,\n \"acc_norm\": 0.4523809523809524,\n \"acc_norm_stderr\": 0.044518079590553275\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.048241815132442176,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.048241815132442176\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.8129032258064516,\n \"acc_stderr\": 0.022185710092252252,\n \"acc_norm\": 0.8129032258064516,\n \"acc_norm_stderr\": 0.022185710092252252\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5024630541871922,\n \"acc_stderr\": 0.03517945038691063,\n \"acc_norm\": 0.5024630541871922,\n \"acc_norm_stderr\": 0.03517945038691063\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.72,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.72,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.8121212121212121,\n \"acc_stderr\": 0.03050193405942914,\n \"acc_norm\": 0.8121212121212121,\n \"acc_norm_stderr\": 0.03050193405942914\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8686868686868687,\n \"acc_stderr\": 0.024063156416822516,\n \"acc_norm\": 0.8686868686868687,\n \"acc_norm_stderr\": 0.024063156416822516\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.9015544041450777,\n \"acc_stderr\": 0.02150024957603347,\n \"acc_norm\": 0.9015544041450777,\n \"acc_norm_stderr\": 0.02150024957603347\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.6666666666666666,\n \"acc_stderr\": 0.023901157979402538,\n 
\"acc_norm\": 0.6666666666666666,\n \"acc_norm_stderr\": 0.023901157979402538\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.36666666666666664,\n \"acc_stderr\": 0.029381620726465073,\n \"acc_norm\": 0.36666666666666664,\n \"acc_norm_stderr\": 0.029381620726465073\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.7100840336134454,\n \"acc_stderr\": 0.029472485833136094,\n \"acc_norm\": 0.7100840336134454,\n \"acc_norm_stderr\": 0.029472485833136094\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.36423841059602646,\n \"acc_stderr\": 0.03929111781242742,\n \"acc_norm\": 0.36423841059602646,\n \"acc_norm_stderr\": 0.03929111781242742\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8477064220183487,\n \"acc_stderr\": 0.015405084393157074,\n \"acc_norm\": 0.8477064220183487,\n \"acc_norm_stderr\": 0.015405084393157074\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5648148148148148,\n \"acc_stderr\": 0.03381200005643527,\n \"acc_norm\": 0.5648148148148148,\n \"acc_norm_stderr\": 0.03381200005643527\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8480392156862745,\n \"acc_stderr\": 0.0251956584289318,\n \"acc_norm\": 0.8480392156862745,\n \"acc_norm_stderr\": 0.0251956584289318\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8523206751054853,\n \"acc_stderr\": 0.0230943295825957,\n \"acc_norm\": 0.8523206751054853,\n \"acc_norm_stderr\": 0.0230943295825957\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6771300448430493,\n \"acc_stderr\": 0.03138147637575499,\n \"acc_norm\": 0.6771300448430493,\n \"acc_norm_stderr\": 0.03138147637575499\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7480916030534351,\n \"acc_stderr\": 0.03807387116306086,\n \"acc_norm\": 0.7480916030534351,\n \"acc_norm_stderr\": 0.03807387116306086\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7851239669421488,\n \"acc_stderr\": 0.03749492448709696,\n \"acc_norm\": 0.7851239669421488,\n \"acc_norm_stderr\": 0.03749492448709696\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.8148148148148148,\n \"acc_stderr\": 0.03755265865037182,\n \"acc_norm\": 0.8148148148148148,\n \"acc_norm_stderr\": 0.03755265865037182\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7607361963190185,\n \"acc_stderr\": 0.033519538795212696,\n \"acc_norm\": 0.7607361963190185,\n \"acc_norm_stderr\": 0.033519538795212696\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8543689320388349,\n \"acc_stderr\": 0.03492606476623791,\n \"acc_norm\": 0.8543689320388349,\n \"acc_norm_stderr\": 0.03492606476623791\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.0230866350868414,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.0230866350868414\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.69,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.69,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8109833971902938,\n \"acc_stderr\": 0.014000791294407,\n \"acc_norm\": 0.8109833971902938,\n \"acc_norm_stderr\": 0.014000791294407\n },\n 
\"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7485549132947977,\n \"acc_stderr\": 0.023357365785874037,\n \"acc_norm\": 0.7485549132947977,\n \"acc_norm_stderr\": 0.023357365785874037\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.40558659217877097,\n \"acc_stderr\": 0.01642167050633918,\n \"acc_norm\": 0.40558659217877097,\n \"acc_norm_stderr\": 0.01642167050633918\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7549019607843137,\n \"acc_stderr\": 0.02463004897982478,\n \"acc_norm\": 0.7549019607843137,\n \"acc_norm_stderr\": 0.02463004897982478\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.7331189710610932,\n \"acc_stderr\": 0.025122637608816646,\n \"acc_norm\": 0.7331189710610932,\n \"acc_norm_stderr\": 0.025122637608816646\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.02313237623454333,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.02313237623454333\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.4929078014184397,\n \"acc_stderr\": 0.02982449855912901,\n \"acc_norm\": 0.4929078014184397,\n \"acc_norm_stderr\": 0.02982449855912901\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4869621903520209,\n \"acc_stderr\": 0.012765893883835332,\n \"acc_norm\": 0.4869621903520209,\n \"acc_norm_stderr\": 0.012765893883835332\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.026303648393696036,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.026303648393696036\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6830065359477124,\n \"acc_stderr\": 0.01882421951270621,\n \"acc_norm\": 0.6830065359477124,\n \"acc_norm_stderr\": 0.01882421951270621\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6909090909090909,\n \"acc_stderr\": 0.044262946482000985,\n \"acc_norm\": 0.6909090909090909,\n \"acc_norm_stderr\": 0.044262946482000985\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7428571428571429,\n \"acc_stderr\": 0.02797982353874455,\n \"acc_norm\": 0.7428571428571429,\n \"acc_norm_stderr\": 0.02797982353874455\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.845771144278607,\n \"acc_stderr\": 0.025538433368578337,\n \"acc_norm\": 0.845771144278607,\n \"acc_norm_stderr\": 0.025538433368578337\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.92,\n \"acc_stderr\": 0.0272659924344291,\n \"acc_norm\": 0.92,\n \"acc_norm_stderr\": 0.0272659924344291\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5843373493975904,\n \"acc_stderr\": 0.03836722176598053,\n \"acc_norm\": 0.5843373493975904,\n \"acc_norm_stderr\": 0.03836722176598053\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.7719298245614035,\n \"acc_stderr\": 0.032180937956023566,\n \"acc_norm\": 0.7719298245614035,\n \"acc_norm_stderr\": 0.032180937956023566\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5691554467564259,\n \"mc1_stderr\": 0.01733527247533237,\n \"mc2\": 0.7193907636520086,\n \"mc2_stderr\": 0.014961127066631282\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8310970797158642,\n \"acc_stderr\": 0.010529981411838906\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.6444275966641395,\n \"acc_stderr\": 0.013185402252713852\n }\n}\n```", "repo_url": "https://huggingface.co/gagan3012/MetaModelv2", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", 
"point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-41-37.767786.parquet", 
"**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-41-37.767786.parquet", 
"**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-41-37.767786.parquet", 
"**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-41-37.767786.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": 
"harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-41-37.767786.parquet"]}]}, 
{"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["**/details_harness|winogrande|5_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T14-41-37.767786.parquet"]}]}, {"config_name": "results", "data_files": [{"split": "2024_01_04T14_41_37.767786", "path": ["results_2024-01-04T14-41-37.767786.parquet"]}, {"split": "latest", "path": 
["results_2024-01-04T14-41-37.767786.parquet"]}]}]}
2024-01-04T14:44:17+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of gagan3012/MetaModelv2 Dataset automatically created during the evaluation run of model gagan3012/MetaModelv2 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T14:41:37.767786 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of gagan3012/MetaModelv2\n\n\n\nDataset automatically created during the evaluation run of model gagan3012/MetaModelv2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:41:37.767786(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of gagan3012/MetaModelv2\n\n\n\nDataset automatically created during the evaluation run of model gagan3012/MetaModelv2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:41:37.767786(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 179, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of gagan3012/MetaModelv2\n\n\n\nDataset automatically created during the evaluation run of model gagan3012/MetaModelv2 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T14:41:37.767786(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
56b05fcd5d22452bc2e651391066e5dc0a9575ce
# Dataset Card for Evaluation run of TomGrc/FusionNet_passthrough_v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [TomGrc/FusionNet_passthrough_v0.1](https://huggingface.co/TomGrc/FusionNet_passthrough_v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_TomGrc__FusionNet_passthrough_v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T14:44:26.926378](https://huggingface.co/datasets/open-llm-leaderboard/details_TomGrc__FusionNet_passthrough_v0.1/blob/main/results_2024-01-04T14-44-26.926378.json) (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6472236922412851, "acc_stderr": 0.03178909819977989, "acc_norm": 0.6565427665352351, "acc_norm_stderr": 0.032449474381421727, "mc1": 0.5006119951040392, "mc1_stderr": 0.01750348793889251, "mc2": 0.6767346790940523, "mc2_stderr": 0.015239478626171948 }, "harness|arc:challenge|25": { "acc": 0.6322525597269625, "acc_stderr": 0.01409099561816848, "acc_norm": 0.6945392491467577, "acc_norm_stderr": 0.013460080478002508 }, "harness|hellaswag|10": { "acc": 0.6413065126468831, "acc_stderr": 0.004786368011500458, "acc_norm": 0.877912766381199, "acc_norm_stderr": 0.003267174458449751 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.04793724854411021, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411021 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.6074074074074074, "acc_stderr": 0.04218506215368879, "acc_norm": 0.6074074074074074, "acc_norm_stderr": 0.04218506215368879 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.75, "acc_stderr": 0.03523807393012047, "acc_norm": 0.75, "acc_norm_stderr": 0.03523807393012047 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.73, "acc_stderr": 0.0446196043338474, "acc_norm": 0.73, "acc_norm_stderr": 0.0446196043338474 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6830188679245283, "acc_stderr": 0.028637235639800893, "acc_norm": 0.6830188679245283, "acc_norm_stderr": 0.028637235639800893 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.7569444444444444, "acc_stderr": 0.03586879280080341, "acc_norm": 0.7569444444444444, "acc_norm_stderr": 0.03586879280080341 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.49, "acc_stderr": 0.05024183937956912, "acc_norm": 0.49, "acc_norm_stderr": 0.05024183937956912 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.6763005780346821, "acc_stderr": 0.03567603799639171, "acc_norm": 0.6763005780346821, "acc_norm_stderr": 0.03567603799639171 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.3627450980392157, "acc_stderr": 0.047840607041056527, "acc_norm": 0.3627450980392157, "acc_norm_stderr": 0.047840607041056527 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.77, "acc_stderr": 0.042295258468165065, "acc_norm": 0.77, "acc_norm_stderr": 0.042295258468165065 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.6, "acc_stderr": 0.03202563076101735, "acc_norm": 0.6, "acc_norm_stderr": 0.03202563076101735 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.49122807017543857, "acc_stderr": 0.04702880432049615, "acc_norm": 0.49122807017543857, "acc_norm_stderr": 0.04702880432049615 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.5862068965517241, "acc_stderr": 0.04104269211806232, "acc_norm": 0.5862068965517241, "acc_norm_stderr": 0.04104269211806232 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.4947089947089947, "acc_stderr": 0.02574986828855657, "acc_norm": 0.4947089947089947, "acc_norm_stderr": 0.02574986828855657 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4365079365079365, "acc_stderr": 0.04435932892851466, "acc_norm": 0.4365079365079365, "acc_norm_stderr": 0.04435932892851466 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.38, "acc_stderr": 0.04878317312145632, "acc_norm": 0.38, "acc_norm_stderr": 0.04878317312145632 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.7870967741935484, "acc_stderr": 0.023287665127268545, "acc_norm": 0.7870967741935484, "acc_norm_stderr": 0.023287665127268545 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.46798029556650245, "acc_stderr": 0.03510766597959217, "acc_norm": 0.46798029556650245, "acc_norm_stderr": 0.03510766597959217 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.68, "acc_stderr": 0.04688261722621504, "acc_norm": 0.68, "acc_norm_stderr": 0.04688261722621504 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.806060606060606, "acc_stderr": 0.030874145136562076, "acc_norm": 0.806060606060606, "acc_norm_stderr": 0.030874145136562076 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.8535353535353535, "acc_stderr": 0.02519092111460391, "acc_norm": 0.8535353535353535, "acc_norm_stderr": 0.02519092111460391 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.8860103626943006, "acc_stderr": 0.022935144053919432, "acc_norm": 0.8860103626943006, "acc_norm_stderr": 0.022935144053919432 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.6435897435897436, "acc_stderr": 0.024283140529467298, "acc_norm": 0.6435897435897436, "acc_norm_stderr": 0.024283140529467298 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.3333333333333333, "acc_stderr": 0.028742040903948482, "acc_norm": 0.3333333333333333, "acc_norm_stderr": 0.028742040903948482 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.6890756302521008, "acc_stderr": 0.03006676158297794, "acc_norm": 0.6890756302521008, "acc_norm_stderr": 0.03006676158297794 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.39072847682119205, "acc_stderr": 0.03983798306659807, "acc_norm": 
0.39072847682119205, "acc_norm_stderr": 0.03983798306659807 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.8330275229357799, "acc_stderr": 0.01599015488507337, "acc_norm": 0.8330275229357799, "acc_norm_stderr": 0.01599015488507337 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.5509259259259259, "acc_stderr": 0.033922384053216174, "acc_norm": 0.5509259259259259, "acc_norm_stderr": 0.033922384053216174 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.8431372549019608, "acc_stderr": 0.02552472232455334, "acc_norm": 0.8431372549019608, "acc_norm_stderr": 0.02552472232455334 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.8523206751054853, "acc_stderr": 0.0230943295825957, "acc_norm": 0.8523206751054853, "acc_norm_stderr": 0.0230943295825957 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6591928251121076, "acc_stderr": 0.0318114974705536, "acc_norm": 0.6591928251121076, "acc_norm_stderr": 0.0318114974705536 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7404580152671756, "acc_stderr": 0.03844876139785271, "acc_norm": 0.7404580152671756, "acc_norm_stderr": 0.03844876139785271 }, "harness|hendrycksTest-international_law|5": { "acc": 0.7933884297520661, "acc_stderr": 0.03695980128098823, "acc_norm": 0.7933884297520661, "acc_norm_stderr": 0.03695980128098823 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.7870370370370371, "acc_stderr": 0.039578354719809805, "acc_norm": 0.7870370370370371, "acc_norm_stderr": 0.039578354719809805 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7423312883435583, "acc_stderr": 0.03436150827846917, "acc_norm": 0.7423312883435583, "acc_norm_stderr": 0.03436150827846917 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4732142857142857, "acc_stderr": 0.047389751192741546, "acc_norm": 0.4732142857142857, "acc_norm_stderr": 0.047389751192741546 }, "harness|hendrycksTest-management|5": { "acc": 0.8252427184466019, "acc_stderr": 0.03760178006026621, "acc_norm": 0.8252427184466019, "acc_norm_stderr": 0.03760178006026621 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8760683760683761, "acc_stderr": 0.021586494001281372, "acc_norm": 0.8760683760683761, "acc_norm_stderr": 0.021586494001281372 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.73, "acc_stderr": 0.044619604333847394, "acc_norm": 0.73, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.8135376756066411, "acc_stderr": 0.013927751372001512, "acc_norm": 0.8135376756066411, "acc_norm_stderr": 0.013927751372001512 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.7283236994219653, "acc_stderr": 0.023948512905468348, "acc_norm": 0.7283236994219653, "acc_norm_stderr": 0.023948512905468348 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.31620111731843575, "acc_stderr": 0.015551673652172552, "acc_norm": 0.31620111731843575, "acc_norm_stderr": 0.015551673652172552 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.7352941176470589, "acc_stderr": 0.025261691219729484, "acc_norm": 0.7352941176470589, "acc_norm_stderr": 0.025261691219729484 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6977491961414791, "acc_stderr": 0.026082700695399672, "acc_norm": 0.6977491961414791, "acc_norm_stderr": 0.026082700695399672 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7777777777777778, "acc_stderr": 0.023132376234543346, "acc_norm": 0.7777777777777778, "acc_norm_stderr": 0.023132376234543346 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 
0.48936170212765956, "acc_stderr": 0.02982074719142248, "acc_norm": 0.48936170212765956, "acc_norm_stderr": 0.02982074719142248 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4810951760104302, "acc_stderr": 0.012761104871472652, "acc_norm": 0.4810951760104302, "acc_norm_stderr": 0.012761104871472652 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.7573529411764706, "acc_stderr": 0.02604066247420126, "acc_norm": 0.7573529411764706, "acc_norm_stderr": 0.02604066247420126 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6764705882352942, "acc_stderr": 0.018926082916083376, "acc_norm": 0.6764705882352942, "acc_norm_stderr": 0.018926082916083376 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.6818181818181818, "acc_stderr": 0.04461272175910508, "acc_norm": 0.6818181818181818, "acc_norm_stderr": 0.04461272175910508 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.7346938775510204, "acc_stderr": 0.028263889943784593, "acc_norm": 0.7346938775510204, "acc_norm_stderr": 0.028263889943784593 }, "harness|hendrycksTest-sociology|5": { "acc": 0.835820895522388, "acc_stderr": 0.026193923544454125, "acc_norm": 0.835820895522388, "acc_norm_stderr": 0.026193923544454125 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.89, "acc_stderr": 0.03144660377352203, "acc_norm": 0.89, "acc_norm_stderr": 0.03144660377352203 }, "harness|hendrycksTest-virology|5": { "acc": 0.5963855421686747, "acc_stderr": 0.03819486140758398, "acc_norm": 0.5963855421686747, "acc_norm_stderr": 0.03819486140758398 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8128654970760234, "acc_stderr": 0.029913127232368032, "acc_norm": 0.8128654970760234, "acc_norm_stderr": 0.029913127232368032 }, "harness|truthfulqa:mc|0": { "mc1": 0.5006119951040392, "mc1_stderr": 0.01750348793889251, "mc2": 0.6767346790940523, "mc2_stderr": 0.015239478626171948 }, "harness|winogrande|5": { "acc": 0.8153117600631413, "acc_stderr": 0.010905978112156883 }, "harness|gsm8k|5": { "acc": 0.22820318423047764, "acc_stderr": 0.011559914877317386 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
open-llm-leaderboard/details_TomGrc__FusionNet_passthrough_v0.1
[ "region:us" ]
2024-01-04T14:46:46+00:00
{"pretty_name": "Evaluation run of TomGrc/FusionNet_passthrough_v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [TomGrc/FusionNet_passthrough_v0.1](https://huggingface.co/TomGrc/FusionNet_passthrough_v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_TomGrc__FusionNet_passthrough_v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T14:44:26.926378](https://huggingface.co/datasets/open-llm-leaderboard/details_TomGrc__FusionNet_passthrough_v0.1/blob/main/results_2024-01-04T14-44-26.926378.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6472236922412851,\n \"acc_stderr\": 0.03178909819977989,\n \"acc_norm\": 0.6565427665352351,\n \"acc_norm_stderr\": 0.032449474381421727,\n \"mc1\": 0.5006119951040392,\n \"mc1_stderr\": 0.01750348793889251,\n \"mc2\": 0.6767346790940523,\n \"mc2_stderr\": 0.015239478626171948\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.6322525597269625,\n \"acc_stderr\": 0.01409099561816848,\n \"acc_norm\": 0.6945392491467577,\n \"acc_norm_stderr\": 0.013460080478002508\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6413065126468831,\n \"acc_stderr\": 0.004786368011500458,\n \"acc_norm\": 0.877912766381199,\n \"acc_norm_stderr\": 0.003267174458449751\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411021,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411021\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.6074074074074074,\n \"acc_stderr\": 0.04218506215368879,\n \"acc_norm\": 0.6074074074074074,\n \"acc_norm_stderr\": 0.04218506215368879\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.75,\n \"acc_stderr\": 0.03523807393012047,\n \"acc_norm\": 0.75,\n \"acc_norm_stderr\": 0.03523807393012047\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.0446196043338474,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.0446196043338474\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6830188679245283,\n \"acc_stderr\": 0.028637235639800893,\n \"acc_norm\": 0.6830188679245283,\n \"acc_norm_stderr\": 0.028637235639800893\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.7569444444444444,\n \"acc_stderr\": 0.03586879280080341,\n \"acc_norm\": 0.7569444444444444,\n \"acc_norm_stderr\": 0.03586879280080341\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 
0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.49,\n \"acc_stderr\": 0.05024183937956912,\n \"acc_norm\": 0.49,\n \"acc_norm_stderr\": 0.05024183937956912\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.6763005780346821,\n \"acc_stderr\": 0.03567603799639171,\n \"acc_norm\": 0.6763005780346821,\n \"acc_norm_stderr\": 0.03567603799639171\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.3627450980392157,\n \"acc_stderr\": 0.047840607041056527,\n \"acc_norm\": 0.3627450980392157,\n \"acc_norm_stderr\": 0.047840607041056527\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.77,\n \"acc_stderr\": 0.042295258468165065,\n \"acc_norm\": 0.77,\n \"acc_norm_stderr\": 0.042295258468165065\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.6,\n \"acc_stderr\": 0.03202563076101735,\n \"acc_norm\": 0.6,\n \"acc_norm_stderr\": 0.03202563076101735\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.49122807017543857,\n \"acc_stderr\": 0.04702880432049615,\n \"acc_norm\": 0.49122807017543857,\n \"acc_norm_stderr\": 0.04702880432049615\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.5862068965517241,\n \"acc_stderr\": 0.04104269211806232,\n \"acc_norm\": 0.5862068965517241,\n \"acc_norm_stderr\": 0.04104269211806232\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.4947089947089947,\n \"acc_stderr\": 0.02574986828855657,\n \"acc_norm\": 0.4947089947089947,\n \"acc_norm_stderr\": 0.02574986828855657\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4365079365079365,\n \"acc_stderr\": 0.04435932892851466,\n \"acc_norm\": 0.4365079365079365,\n \"acc_norm_stderr\": 0.04435932892851466\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.38,\n \"acc_stderr\": 0.04878317312145632,\n \"acc_norm\": 0.38,\n \"acc_norm_stderr\": 0.04878317312145632\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.7870967741935484,\n \"acc_stderr\": 0.023287665127268545,\n \"acc_norm\": 0.7870967741935484,\n \"acc_norm_stderr\": 0.023287665127268545\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.46798029556650245,\n \"acc_stderr\": 0.03510766597959217,\n \"acc_norm\": 0.46798029556650245,\n \"acc_norm_stderr\": 0.03510766597959217\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.68,\n \"acc_stderr\": 0.04688261722621504,\n \"acc_norm\": 0.68,\n \"acc_norm_stderr\": 0.04688261722621504\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.806060606060606,\n \"acc_stderr\": 0.030874145136562076,\n \"acc_norm\": 0.806060606060606,\n \"acc_norm_stderr\": 0.030874145136562076\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.8535353535353535,\n \"acc_stderr\": 0.02519092111460391,\n \"acc_norm\": 0.8535353535353535,\n \"acc_norm_stderr\": 0.02519092111460391\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.8860103626943006,\n \"acc_stderr\": 0.022935144053919432,\n \"acc_norm\": 0.8860103626943006,\n \"acc_norm_stderr\": 0.022935144053919432\n },\n \"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 
0.6435897435897436,\n \"acc_stderr\": 0.024283140529467298,\n \"acc_norm\": 0.6435897435897436,\n \"acc_norm_stderr\": 0.024283140529467298\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.3333333333333333,\n \"acc_stderr\": 0.028742040903948482,\n \"acc_norm\": 0.3333333333333333,\n \"acc_norm_stderr\": 0.028742040903948482\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.6890756302521008,\n \"acc_stderr\": 0.03006676158297794,\n \"acc_norm\": 0.6890756302521008,\n \"acc_norm_stderr\": 0.03006676158297794\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.39072847682119205,\n \"acc_stderr\": 0.03983798306659807,\n \"acc_norm\": 0.39072847682119205,\n \"acc_norm_stderr\": 0.03983798306659807\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.8330275229357799,\n \"acc_stderr\": 0.01599015488507337,\n \"acc_norm\": 0.8330275229357799,\n \"acc_norm_stderr\": 0.01599015488507337\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.5509259259259259,\n \"acc_stderr\": 0.033922384053216174,\n \"acc_norm\": 0.5509259259259259,\n \"acc_norm_stderr\": 0.033922384053216174\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.8431372549019608,\n \"acc_stderr\": 0.02552472232455334,\n \"acc_norm\": 0.8431372549019608,\n \"acc_norm_stderr\": 0.02552472232455334\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.8523206751054853,\n \"acc_stderr\": 0.0230943295825957,\n \"acc_norm\": 0.8523206751054853,\n \"acc_norm_stderr\": 0.0230943295825957\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6591928251121076,\n \"acc_stderr\": 0.0318114974705536,\n \"acc_norm\": 0.6591928251121076,\n \"acc_norm_stderr\": 0.0318114974705536\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7404580152671756,\n \"acc_stderr\": 0.03844876139785271,\n \"acc_norm\": 0.7404580152671756,\n \"acc_norm_stderr\": 0.03844876139785271\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.7933884297520661,\n \"acc_stderr\": 0.03695980128098823,\n \"acc_norm\": 0.7933884297520661,\n \"acc_norm_stderr\": 0.03695980128098823\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.7870370370370371,\n \"acc_stderr\": 0.039578354719809805,\n \"acc_norm\": 0.7870370370370371,\n \"acc_norm_stderr\": 0.039578354719809805\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7423312883435583,\n \"acc_stderr\": 0.03436150827846917,\n \"acc_norm\": 0.7423312883435583,\n \"acc_norm_stderr\": 0.03436150827846917\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4732142857142857,\n \"acc_stderr\": 0.047389751192741546,\n \"acc_norm\": 0.4732142857142857,\n \"acc_norm_stderr\": 0.047389751192741546\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.8252427184466019,\n \"acc_stderr\": 0.03760178006026621,\n \"acc_norm\": 0.8252427184466019,\n \"acc_norm_stderr\": 0.03760178006026621\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8760683760683761,\n \"acc_stderr\": 0.021586494001281372,\n \"acc_norm\": 0.8760683760683761,\n \"acc_norm_stderr\": 0.021586494001281372\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.73,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.73,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.8135376756066411,\n \"acc_stderr\": 0.013927751372001512,\n \"acc_norm\": 
0.8135376756066411,\n \"acc_norm_stderr\": 0.013927751372001512\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.7283236994219653,\n \"acc_stderr\": 0.023948512905468348,\n \"acc_norm\": 0.7283236994219653,\n \"acc_norm_stderr\": 0.023948512905468348\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.31620111731843575,\n \"acc_stderr\": 0.015551673652172552,\n \"acc_norm\": 0.31620111731843575,\n \"acc_norm_stderr\": 0.015551673652172552\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.7352941176470589,\n \"acc_stderr\": 0.025261691219729484,\n \"acc_norm\": 0.7352941176470589,\n \"acc_norm_stderr\": 0.025261691219729484\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6977491961414791,\n \"acc_stderr\": 0.026082700695399672,\n \"acc_norm\": 0.6977491961414791,\n \"acc_norm_stderr\": 0.026082700695399672\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7777777777777778,\n \"acc_stderr\": 0.023132376234543346,\n \"acc_norm\": 0.7777777777777778,\n \"acc_norm_stderr\": 0.023132376234543346\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.48936170212765956,\n \"acc_stderr\": 0.02982074719142248,\n \"acc_norm\": 0.48936170212765956,\n \"acc_norm_stderr\": 0.02982074719142248\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4810951760104302,\n \"acc_stderr\": 0.012761104871472652,\n \"acc_norm\": 0.4810951760104302,\n \"acc_norm_stderr\": 0.012761104871472652\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.7573529411764706,\n \"acc_stderr\": 0.02604066247420126,\n \"acc_norm\": 0.7573529411764706,\n \"acc_norm_stderr\": 0.02604066247420126\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6764705882352942,\n \"acc_stderr\": 0.018926082916083376,\n \"acc_norm\": 0.6764705882352942,\n \"acc_norm_stderr\": 0.018926082916083376\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.6818181818181818,\n \"acc_stderr\": 0.04461272175910508,\n \"acc_norm\": 0.6818181818181818,\n \"acc_norm_stderr\": 0.04461272175910508\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.7346938775510204,\n \"acc_stderr\": 0.028263889943784593,\n \"acc_norm\": 0.7346938775510204,\n \"acc_norm_stderr\": 0.028263889943784593\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.835820895522388,\n \"acc_stderr\": 0.026193923544454125,\n \"acc_norm\": 0.835820895522388,\n \"acc_norm_stderr\": 0.026193923544454125\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.89,\n \"acc_stderr\": 0.03144660377352203,\n \"acc_norm\": 0.89,\n \"acc_norm_stderr\": 0.03144660377352203\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.5963855421686747,\n \"acc_stderr\": 0.03819486140758398,\n \"acc_norm\": 0.5963855421686747,\n \"acc_norm_stderr\": 0.03819486140758398\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8128654970760234,\n \"acc_stderr\": 0.029913127232368032,\n \"acc_norm\": 0.8128654970760234,\n \"acc_norm_stderr\": 0.029913127232368032\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5006119951040392,\n \"mc1_stderr\": 0.01750348793889251,\n \"mc2\": 0.6767346790940523,\n \"mc2_stderr\": 0.015239478626171948\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.8153117600631413,\n \"acc_stderr\": 0.010905978112156883\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.22820318423047764,\n \"acc_stderr\": 0.011559914877317386\n }\n}\n```", "repo_url": 
"https://huggingface.co/TomGrc/FusionNet_passthrough_v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-44-26.926378.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-44-26.926378.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-44-26.926378.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T14-44-26.926378.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-44-26.926378.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T14_44_26.926378", "path": ["**/details_harness|winogrande|5_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T14-44-26.926378.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T14_44_26.926378", "path": ["results_2024-01-04T14-44-26.926378.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T14-44-26.926378.parquet"]}]}]}
2024-01-04T14:47:10+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of TomGrc/FusionNet_passthrough_v0.1 Dataset automatically created during the evaluation run of model TomGrc/FusionNet_passthrough_v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T14:44:26.926378 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
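A minimal sketch of the loading step referenced above ("To load the details from a run, you can for instance do the following"): the repository name follows the leaderboard's usual `details_<org>__<model>` naming pattern and the `harness_winogrande_5` config listed among this card's files; these names and the chosen split are assumptions inferred from the card, not an official snippet.

```python
# Hedged sketch: load one evaluation config from this details dataset.
# The repo name follows the leaderboard convention details_<org>__<model>,
# and "harness_winogrande_5" is one of the configs listed in this card;
# both are assumptions inferred from the card text.
from datasets import load_dataset

data = load_dataset(
    "open-llm-leaderboard/details_TomGrc__FusionNet_passthrough_v0.1",
    "harness_winogrande_5",
    split="train",  # per the card, "train" points to the latest results; a "latest" split is also defined
)
print(data)
```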
[ "# Dataset Card for Evaluation run of TomGrc/FusionNet_passthrough_v0.1\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FusionNet_passthrough_v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:44:26.926378(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of TomGrc/FusionNet_passthrough_v0.1\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FusionNet_passthrough_v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T14:44:26.926378(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of TomGrc/FusionNet_passthrough_v0.1\n\n\n\nDataset automatically created during the evaluation run of model TomGrc/FusionNet_passthrough_v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T14:44:26.926378(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
71aebce1cea78129af3339a2744e99045b79bc38
# Dataset Card for Dataset Name <!-- Provide a quick summary of the dataset. --> This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1). ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. 
--> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
jilp00/NousResearch-func-calling
[ "region:us" ]
2024-01-04T14:54:48+00:00
{"dataset_info": {"features": [{"name": "id", "dtype": "string"}, {"name": "conversations", "list": [{"name": "from", "dtype": "string"}, {"name": "value", "dtype": "string"}]}, {"name": "category", "dtype": "string"}, {"name": "subcategory", "dtype": "string"}, {"name": "task", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 3285167, "num_examples": 1100}], "download_size": 1057557, "dataset_size": 3285167}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-04T14:58:28+00:00
[]
[]
TAGS #region-us
# Dataset Card for Dataset Name This dataset card aims to be a base template for new datasets. It has been generated using this raw template. ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 34, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
b47ceb5555b3bf8a628cf9394136221a9906645c
# Dataset Card for Dataset Name <!-- Provide a quick summary of the dataset. --> This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1). ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. 
--> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
Yusuf23/iscep
[ "region:us" ]
2024-01-04T15:00:22+00:00
{}
2024-01-04T15:02:20+00:00
[]
[]
TAGS #region-us
# Dataset Card for Dataset Name This dataset card aims to be a base template for new datasets. It has been generated using this raw template. ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 34, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
072878c24ab0f9cfaa905501841d6bc4d8853638
<video controls title="aMUSEd on mps"> <source src="https://huggingface.co/datasets/pcuenq/amused_mps/resolve/main/amused_mps.mp4" type="video/mp4"> </video>
pcuenq/amused_mps
[ "region:us" ]
2024-01-04T15:00:22+00:00
{}
2024-01-04T15:05:58+00:00
[]
[]
TAGS #region-us
<video controls title="aMUSEd on mps"> <source src="URL type="video/mp4">
[]
[ "TAGS\n#region-us \n" ]
[ 6 ]
[ "passage: TAGS\n#region-us \n" ]
218a34b7553096f98d6a7ea05e2b9b8f19f25bc6
# Dataset Card for Evaluation run of maximuslee07/llama-2-13b-rockwellautomation <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [maximuslee07/llama-2-13b-rockwellautomation](https://huggingface.co/maximuslee07/llama-2-13b-rockwellautomation) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_maximuslee07__llama-2-13b-rockwellautomation", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T15:04:41.345460](https://huggingface.co/datasets/open-llm-leaderboard/details_maximuslee07__llama-2-13b-rockwellautomation/blob/main/results_2024-01-04T15-04-41.345460.json) (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks; you can find each in the results and the "latest" split for each eval, and a short sketch for downloading this results file directly appears at the end of this card): ```python { "all": { "acc": 0.2511081603813156, "acc_stderr": 0.030845430828452477, "acc_norm": 0.2520379726970298, "acc_norm_stderr": 0.031666273642418356, "mc1": 0.24357405140758873, "mc1_stderr": 0.015026354824910782, "mc2": NaN, "mc2_stderr": NaN }, "harness|arc:challenge|25": { "acc": 0.23378839590443687, "acc_stderr": 0.012368225378507148, "acc_norm": 0.2815699658703072, "acc_norm_stderr": 0.013143376735009015 }, "harness|hellaswag|10": { "acc": 0.2548297151961761, "acc_stderr": 0.004348748730529937, "acc_norm": 0.2577175861382195, "acc_norm_stderr": 0.00436483800033562 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.26, "acc_stderr": 0.04408440022768077, "acc_norm": 0.26, "acc_norm_stderr": 0.04408440022768077 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.28888888888888886, "acc_stderr": 0.0391545063041425, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.0391545063041425 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.25, "acc_stderr": 0.03523807393012047, "acc_norm": 0.25, "acc_norm_stderr": 0.03523807393012047 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.18, "acc_stderr": 0.03861229196653695, "acc_norm": 0.18, "acc_norm_stderr": 0.03861229196653695 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.2830188679245283, "acc_stderr": 0.027724236492700907, "acc_norm": 0.2830188679245283, "acc_norm_stderr": 0.027724236492700907 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03685651095897532, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr":
0.047258156262526045 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.31, "acc_stderr": 0.04648231987117316, "acc_norm": 0.31, "acc_norm_stderr": 0.04648231987117316 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.24855491329479767, "acc_stderr": 0.03295304696818318, "acc_norm": 0.24855491329479767, "acc_norm_stderr": 0.03295304696818318 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.27450980392156865, "acc_stderr": 0.044405219061793275, "acc_norm": 0.27450980392156865, "acc_norm_stderr": 0.044405219061793275 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.2, "acc_stderr": 0.026148818018424502, "acc_norm": 0.2, "acc_norm_stderr": 0.026148818018424502 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2631578947368421, "acc_stderr": 0.041424397194893624, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.041424397194893624 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.3103448275862069, "acc_stderr": 0.03855289616378948, "acc_norm": 0.3103448275862069, "acc_norm_stderr": 0.03855289616378948 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.2619047619047619, "acc_stderr": 0.022644212615525218, "acc_norm": 0.2619047619047619, "acc_norm_stderr": 0.022644212615525218 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.29365079365079366, "acc_stderr": 0.040735243221471255, "acc_norm": 0.29365079365079366, "acc_norm_stderr": 0.040735243221471255 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.28, "acc_stderr": 0.04512608598542128, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542128 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.25161290322580643, "acc_stderr": 0.024685979286239963, "acc_norm": 0.25161290322580643, "acc_norm_stderr": 0.024685979286239963 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.2019704433497537, "acc_stderr": 0.02824735012218026, "acc_norm": 0.2019704433497537, "acc_norm_stderr": 0.02824735012218026 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 0.04229525846816506 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.2787878787878788, "acc_stderr": 0.03501438706296781, "acc_norm": 0.2787878787878788, "acc_norm_stderr": 0.03501438706296781 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.2878787878787879, "acc_stderr": 0.03225883512300992, "acc_norm": 0.2878787878787879, "acc_norm_stderr": 0.03225883512300992 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.23316062176165803, "acc_stderr": 0.030516111371476008, "acc_norm": 0.23316062176165803, "acc_norm_stderr": 0.030516111371476008 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.21794871794871795, "acc_stderr": 0.02093244577446317, "acc_norm": 0.21794871794871795, "acc_norm_stderr": 0.02093244577446317 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.28888888888888886, "acc_stderr": 0.027634907264178544, "acc_norm": 0.28888888888888886, "acc_norm_stderr": 0.027634907264178544 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.25630252100840334, "acc_stderr": 0.02835962087053395, "acc_norm": 0.25630252100840334, "acc_norm_stderr": 0.02835962087053395 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.2582781456953642, "acc_stderr": 
0.035737053147634576, "acc_norm": 0.2582781456953642, "acc_norm_stderr": 0.035737053147634576 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.25137614678899084, "acc_stderr": 0.018599206360287415, "acc_norm": 0.25137614678899084, "acc_norm_stderr": 0.018599206360287415 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.2037037037037037, "acc_stderr": 0.027467401804058014, "acc_norm": 0.2037037037037037, "acc_norm_stderr": 0.027467401804058014 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.20098039215686275, "acc_stderr": 0.028125972265654355, "acc_norm": 0.20098039215686275, "acc_norm_stderr": 0.028125972265654355 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.24472573839662448, "acc_stderr": 0.027985699387036416, "acc_norm": 0.24472573839662448, "acc_norm_stderr": 0.027985699387036416 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.1210762331838565, "acc_stderr": 0.02189417411318574, "acc_norm": 0.1210762331838565, "acc_norm_stderr": 0.02189417411318574 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.25190839694656486, "acc_stderr": 0.03807387116306085, "acc_norm": 0.25190839694656486, "acc_norm_stderr": 0.03807387116306085 }, "harness|hendrycksTest-international_law|5": { "acc": 0.2809917355371901, "acc_stderr": 0.041032038305145124, "acc_norm": 0.2809917355371901, "acc_norm_stderr": 0.041032038305145124 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.25925925925925924, "acc_stderr": 0.04236511258094632, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 0.04236511258094632 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.2392638036809816, "acc_stderr": 0.03351953879521269, "acc_norm": 0.2392638036809816, "acc_norm_stderr": 0.03351953879521269 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.26785714285714285, "acc_stderr": 0.04203277291467764, "acc_norm": 0.26785714285714285, "acc_norm_stderr": 0.04203277291467764 }, "harness|hendrycksTest-management|5": { "acc": 0.20388349514563106, "acc_stderr": 0.039891398595317706, "acc_norm": 0.20388349514563106, "acc_norm_stderr": 0.039891398595317706 }, "harness|hendrycksTest-marketing|5": { "acc": 0.25213675213675213, "acc_stderr": 0.02844796547623101, "acc_norm": 0.25213675213675213, "acc_norm_stderr": 0.02844796547623101 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.22349936143039592, "acc_stderr": 0.014897235229450708, "acc_norm": 0.22349936143039592, "acc_norm_stderr": 0.014897235229450708 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.25722543352601157, "acc_stderr": 0.023532925431044276, "acc_norm": 0.25722543352601157, "acc_norm_stderr": 0.023532925431044276 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.21787709497206703, "acc_stderr": 0.013806211780732972, "acc_norm": 0.21787709497206703, "acc_norm_stderr": 0.013806211780732972 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.24836601307189543, "acc_stderr": 0.024739981355113592, "acc_norm": 0.24836601307189543, "acc_norm_stderr": 0.024739981355113592 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.26366559485530544, "acc_stderr": 0.02502553850053234, "acc_norm": 0.26366559485530544, "acc_norm_stderr": 0.02502553850053234 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.25925925925925924, "acc_stderr": 0.02438366553103546, "acc_norm": 0.25925925925925924, "acc_norm_stderr": 
0.02438366553103546 }, "harness|hendrycksTest-professional_accounting|5": { "acc": 0.26595744680851063, "acc_stderr": 0.026358065698880585, "acc_norm": 0.26595744680851063, "acc_norm_stderr": 0.026358065698880585 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2392438070404172, "acc_stderr": 0.010896123652676644, "acc_norm": 0.2392438070404172, "acc_norm_stderr": 0.010896123652676644 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.23897058823529413, "acc_stderr": 0.025905280644893006, "acc_norm": 0.23897058823529413, "acc_norm_stderr": 0.025905280644893006 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.27941176470588236, "acc_stderr": 0.018152871051538816, "acc_norm": 0.27941176470588236, "acc_norm_stderr": 0.018152871051538816 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.22727272727272727, "acc_stderr": 0.040139645540727756, "acc_norm": 0.22727272727272727, "acc_norm_stderr": 0.040139645540727756 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.23673469387755103, "acc_stderr": 0.02721283588407315, "acc_norm": 0.23673469387755103, "acc_norm_stderr": 0.02721283588407315 }, "harness|hendrycksTest-sociology|5": { "acc": 0.2935323383084577, "acc_stderr": 0.03220024104534205, "acc_norm": 0.2935323383084577, "acc_norm_stderr": 0.03220024104534205 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.25, "acc_stderr": 0.04351941398892446, "acc_norm": 0.25, "acc_norm_stderr": 0.04351941398892446 }, "harness|hendrycksTest-virology|5": { "acc": 0.21686746987951808, "acc_stderr": 0.03208284450356365, "acc_norm": 0.21686746987951808, "acc_norm_stderr": 0.03208284450356365 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.2631578947368421, "acc_stderr": 0.033773102522091945, "acc_norm": 0.2631578947368421, "acc_norm_stderr": 0.033773102522091945 }, "harness|truthfulqa:mc|0": { "mc1": 0.24357405140758873, "mc1_stderr": 0.015026354824910782, "mc2": NaN, "mc2_stderr": NaN }, "harness|winogrande|5": { "acc": 0.4980268350434096, "acc_stderr": 0.014052376259225632 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
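For convenience, the aggregated metrics shown under "Latest results" can also be read straight from the results JSON file stored in this repository, without loading any of the per-task configurations. The snippet below is a minimal sketch of one way to do this; it assumes the `huggingface_hub` library is available, reuses the results filename from the link above (newer runs will write files with a different timestamp), and guesses at the JSON nesting, so adjust the lookup if the file structure differs.

```python
import json

from huggingface_hub import hf_hub_download

# Download the aggregated results file for the run shown above.
# The filename comes from the "Latest results" link; later runs use a new timestamp.
path = hf_hub_download(
    repo_id="open-llm-leaderboard/details_maximuslee07__llama-2-13b-rockwellautomation",
    filename="results_2024-01-04T15-04-41.345460.json",
    repo_type="dataset",
)

with open(path) as f:
    data = json.load(f)

# The card shows task entries keyed like "harness|arc:challenge|25".
# If the file nests them under a "results" key, fall back to that level.
tasks = data.get("results", data)
for name in ("harness|arc:challenge|25", "harness|hellaswag|10", "harness|winogrande|5"):
    print(name, tasks[name])
```

Loading the "results" configuration with `load_dataset`, as described at the top of this card, is an alternative route to the same aggregated numbers.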
open-llm-leaderboard/details_maximuslee07__llama-2-13b-rockwellautomation
[ "region:us" ]
2024-01-04T15:07:01+00:00
{"pretty_name": "Evaluation run of maximuslee07/llama-2-13b-rockwellautomation", "dataset_summary": "Dataset automatically created during the evaluation run of model [maximuslee07/llama-2-13b-rockwellautomation](https://huggingface.co/maximuslee07/llama-2-13b-rockwellautomation) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_maximuslee07__llama-2-13b-rockwellautomation\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T15:04:41.345460](https://huggingface.co/datasets/open-llm-leaderboard/details_maximuslee07__llama-2-13b-rockwellautomation/blob/main/results_2024-01-04T15-04-41.345460.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.2511081603813156,\n \"acc_stderr\": 0.030845430828452477,\n \"acc_norm\": 0.2520379726970298,\n \"acc_norm_stderr\": 0.031666273642418356,\n \"mc1\": 0.24357405140758873,\n \"mc1_stderr\": 0.015026354824910782,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.23378839590443687,\n \"acc_stderr\": 0.012368225378507148,\n \"acc_norm\": 0.2815699658703072,\n \"acc_norm_stderr\": 0.013143376735009015\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.2548297151961761,\n \"acc_stderr\": 0.004348748730529937,\n \"acc_norm\": 0.2577175861382195,\n \"acc_norm_stderr\": 0.00436483800033562\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.26,\n \"acc_stderr\": 0.04408440022768077,\n \"acc_norm\": 0.26,\n \"acc_norm_stderr\": 0.04408440022768077\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.28888888888888886,\n \"acc_stderr\": 0.0391545063041425,\n \"acc_norm\": 0.28888888888888886,\n \"acc_norm_stderr\": 0.0391545063041425\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.03523807393012047,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.03523807393012047\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.18,\n \"acc_stderr\": 0.03861229196653695,\n \"acc_norm\": 0.18,\n \"acc_norm_stderr\": 0.03861229196653695\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.2830188679245283,\n \"acc_stderr\": 0.027724236492700907,\n \"acc_norm\": 0.2830188679245283,\n \"acc_norm_stderr\": 0.027724236492700907\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2638888888888889,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.2638888888888889,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 
0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.31,\n \"acc_stderr\": 0.04648231987117316,\n \"acc_norm\": 0.31,\n \"acc_norm_stderr\": 0.04648231987117316\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.24855491329479767,\n \"acc_stderr\": 0.03295304696818318,\n \"acc_norm\": 0.24855491329479767,\n \"acc_norm_stderr\": 0.03295304696818318\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.27450980392156865,\n \"acc_stderr\": 0.044405219061793275,\n \"acc_norm\": 0.27450980392156865,\n \"acc_norm_stderr\": 0.044405219061793275\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.2,\n \"acc_stderr\": 0.026148818018424502,\n \"acc_norm\": 0.2,\n \"acc_norm_stderr\": 0.026148818018424502\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2631578947368421,\n \"acc_stderr\": 0.041424397194893624,\n \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.041424397194893624\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.3103448275862069,\n \"acc_stderr\": 0.03855289616378948,\n \"acc_norm\": 0.3103448275862069,\n \"acc_norm_stderr\": 0.03855289616378948\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.2619047619047619,\n \"acc_stderr\": 0.022644212615525218,\n \"acc_norm\": 0.2619047619047619,\n \"acc_norm_stderr\": 0.022644212615525218\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.29365079365079366,\n \"acc_stderr\": 0.040735243221471255,\n \"acc_norm\": 0.29365079365079366,\n \"acc_norm_stderr\": 0.040735243221471255\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.04512608598542128,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542128\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.25161290322580643,\n \"acc_stderr\": 0.024685979286239963,\n \"acc_norm\": 0.25161290322580643,\n \"acc_norm_stderr\": 0.024685979286239963\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.2019704433497537,\n \"acc_stderr\": 0.02824735012218026,\n \"acc_norm\": 0.2019704433497537,\n \"acc_norm_stderr\": 0.02824735012218026\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.2787878787878788,\n \"acc_stderr\": 0.03501438706296781,\n \"acc_norm\": 0.2787878787878788,\n \"acc_norm_stderr\": 0.03501438706296781\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.2878787878787879,\n \"acc_stderr\": 0.03225883512300992,\n \"acc_norm\": 0.2878787878787879,\n \"acc_norm_stderr\": 0.03225883512300992\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.23316062176165803,\n \"acc_stderr\": 0.030516111371476008,\n \"acc_norm\": 0.23316062176165803,\n \"acc_norm_stderr\": 0.030516111371476008\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.21794871794871795,\n \"acc_stderr\": 0.02093244577446317,\n \"acc_norm\": 0.21794871794871795,\n \"acc_norm_stderr\": 0.02093244577446317\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.28888888888888886,\n \"acc_stderr\": 0.027634907264178544,\n \"acc_norm\": 0.28888888888888886,\n \"acc_norm_stderr\": 0.027634907264178544\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.25630252100840334,\n \"acc_stderr\": 0.02835962087053395,\n \"acc_norm\": 0.25630252100840334,\n \"acc_norm_stderr\": 0.02835962087053395\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2582781456953642,\n \"acc_stderr\": 0.035737053147634576,\n \"acc_norm\": 0.2582781456953642,\n \"acc_norm_stderr\": 0.035737053147634576\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.25137614678899084,\n \"acc_stderr\": 0.018599206360287415,\n \"acc_norm\": 0.25137614678899084,\n \"acc_norm_stderr\": 0.018599206360287415\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.2037037037037037,\n \"acc_stderr\": 0.027467401804058014,\n \"acc_norm\": 0.2037037037037037,\n \"acc_norm_stderr\": 0.027467401804058014\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.20098039215686275,\n \"acc_stderr\": 0.028125972265654355,\n \"acc_norm\": 0.20098039215686275,\n \"acc_norm_stderr\": 0.028125972265654355\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.24472573839662448,\n \"acc_stderr\": 0.027985699387036416,\n \"acc_norm\": 0.24472573839662448,\n \"acc_norm_stderr\": 0.027985699387036416\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.1210762331838565,\n \"acc_stderr\": 0.02189417411318574,\n \"acc_norm\": 0.1210762331838565,\n \"acc_norm_stderr\": 0.02189417411318574\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.25190839694656486,\n \"acc_stderr\": 0.03807387116306085,\n \"acc_norm\": 0.25190839694656486,\n \"acc_norm_stderr\": 0.03807387116306085\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.2809917355371901,\n \"acc_stderr\": 0.041032038305145124,\n \"acc_norm\": 0.2809917355371901,\n \"acc_norm_stderr\": 0.041032038305145124\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.04236511258094632,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.04236511258094632\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.2392638036809816,\n \"acc_stderr\": 0.03351953879521269,\n \"acc_norm\": 0.2392638036809816,\n \"acc_norm_stderr\": 0.03351953879521269\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.26785714285714285,\n \"acc_stderr\": 0.04203277291467764,\n \"acc_norm\": 0.26785714285714285,\n \"acc_norm_stderr\": 0.04203277291467764\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.20388349514563106,\n \"acc_stderr\": 0.039891398595317706,\n \"acc_norm\": 0.20388349514563106,\n \"acc_norm_stderr\": 0.039891398595317706\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.25213675213675213,\n \"acc_stderr\": 0.02844796547623101,\n \"acc_norm\": 0.25213675213675213,\n \"acc_norm_stderr\": 0.02844796547623101\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n 
\"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 0.22349936143039592,\n \"acc_stderr\": 0.014897235229450708,\n \"acc_norm\": 0.22349936143039592,\n \"acc_norm_stderr\": 0.014897235229450708\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.25722543352601157,\n \"acc_stderr\": 0.023532925431044276,\n \"acc_norm\": 0.25722543352601157,\n \"acc_norm_stderr\": 0.023532925431044276\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.21787709497206703,\n \"acc_stderr\": 0.013806211780732972,\n \"acc_norm\": 0.21787709497206703,\n \"acc_norm_stderr\": 0.013806211780732972\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.24836601307189543,\n \"acc_stderr\": 0.024739981355113592,\n \"acc_norm\": 0.24836601307189543,\n \"acc_norm_stderr\": 0.024739981355113592\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.26366559485530544,\n \"acc_stderr\": 0.02502553850053234,\n \"acc_norm\": 0.26366559485530544,\n \"acc_norm_stderr\": 0.02502553850053234\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.25925925925925924,\n \"acc_stderr\": 0.02438366553103546,\n \"acc_norm\": 0.25925925925925924,\n \"acc_norm_stderr\": 0.02438366553103546\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.26595744680851063,\n \"acc_stderr\": 0.026358065698880585,\n \"acc_norm\": 0.26595744680851063,\n \"acc_norm_stderr\": 0.026358065698880585\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2392438070404172,\n \"acc_stderr\": 0.010896123652676644,\n \"acc_norm\": 0.2392438070404172,\n \"acc_norm_stderr\": 0.010896123652676644\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.23897058823529413,\n \"acc_stderr\": 0.025905280644893006,\n \"acc_norm\": 0.23897058823529413,\n \"acc_norm_stderr\": 0.025905280644893006\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.27941176470588236,\n \"acc_stderr\": 0.018152871051538816,\n \"acc_norm\": 0.27941176470588236,\n \"acc_norm_stderr\": 0.018152871051538816\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.22727272727272727,\n \"acc_stderr\": 0.040139645540727756,\n \"acc_norm\": 0.22727272727272727,\n \"acc_norm_stderr\": 0.040139645540727756\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.23673469387755103,\n \"acc_stderr\": 0.02721283588407315,\n \"acc_norm\": 0.23673469387755103,\n \"acc_norm_stderr\": 0.02721283588407315\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.2935323383084577,\n \"acc_stderr\": 0.03220024104534205,\n \"acc_norm\": 0.2935323383084577,\n \"acc_norm_stderr\": 0.03220024104534205\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.25,\n \"acc_stderr\": 0.04351941398892446,\n \"acc_norm\": 0.25,\n \"acc_norm_stderr\": 0.04351941398892446\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.21686746987951808,\n \"acc_stderr\": 0.03208284450356365,\n \"acc_norm\": 0.21686746987951808,\n \"acc_norm_stderr\": 0.03208284450356365\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.2631578947368421,\n \"acc_stderr\": 0.033773102522091945,\n \"acc_norm\": 0.2631578947368421,\n \"acc_norm_stderr\": 0.033773102522091945\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.24357405140758873,\n \"mc1_stderr\": 0.015026354824910782,\n \"mc2\": NaN,\n \"mc2_stderr\": NaN\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.4980268350434096,\n \"acc_stderr\": 0.014052376259225632\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n 
\"acc_stderr\": 0.0\n }\n}\n```", "repo_url": "https://huggingface.co/maximuslee07/llama-2-13b-rockwellautomation", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|arc:challenge|25_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|gsm8k|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hellaswag|10_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T15-04-41.345460.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T15-04-41.345460.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T15-04-41.345460.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T15-04-41.345460.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T15-04-41.345460.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T15_04_41.345460", "path": ["**/details_harness|winogrande|5_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T15-04-41.345460.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T15_04_41.345460", "path": ["results_2024-01-04T15-04-41.345460.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T15-04-41.345460.parquet"]}]}]}
2024-01-04T15:07:26+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of maximuslee07/llama-2-13b-rockwellautomation Dataset automatically created during the evaluation run of model maximuslee07/llama-2-13b-rockwellautomation on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T15:04:41.345460 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Evaluation run of maximuslee07/llama-2-13b-rockwellautomation\n\n\n\nDataset automatically created during the evaluation run of model maximuslee07/llama-2-13b-rockwellautomation on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T15:04:41.345460(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of maximuslee07/llama-2-13b-rockwellautomation\n\n\n\nDataset automatically created during the evaluation run of model maximuslee07/llama-2-13b-rockwellautomation on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T15:04:41.345460(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 189, 67, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of maximuslee07/llama-2-13b-rockwellautomation\n\n\n\nDataset automatically created during the evaluation run of model maximuslee07/llama-2-13b-rockwellautomation on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T15:04:41.345460(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
a8c6894407e504e6de0f144268a1b111d3e8ecb2
# Dataset Card for Dataset Name <!-- Provide a quick summary of the dataset. --> This dataset card aims to be a base template for new datasets. It has been generated using [this raw template](https://github.com/huggingface/huggingface_hub/blob/main/src/huggingface_hub/templates/datasetcard_template.md?plain=1). ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). --> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. 
--> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
Chong0/SBLGNT
[ "language:gr", "license:cc-by-4.0", "region:us" ]
2024-01-04T15:14:18+00:00
{"language": ["gr"], "license": ["cc-by-4.0"], "pretty_name": "SBLGNT"}
2024-01-04T15:45:12+00:00
[]
[ "gr" ]
TAGS #language-gr #license-cc-by-4.0 #region-us
# Dataset Card for Dataset Name This dataset card aims to be a base template for new datasets. It has been generated using this raw template. ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
[ "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#language-gr #license-cc-by-4.0 #region-us \n", "# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 19, 34, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#language-gr #license-cc-by-4.0 #region-us \n# Dataset Card for Dataset Name\n\n\n\nThis dataset card aims to be a base template for new datasets. It has been generated using this raw template.## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
6a4528d31e771d77ea52c35748605c7fbf4c167f
# MetaHate: A Dataset for Unifying Efforts on Hate Speech Detection This is MetaHate: a meta-collection of 36 hate speech datasets from social media comments. ## Dataset Structure The dataset contains 1,226,202 social media posts in a TSV file. Each element contains the following fields: | Field Name | Type | Possible Values | Description | |------------|------|-----------------|----------------------------------------------------------------------| | text | str | any | Social media post. Each post is unique. | | label | int | 0, 1 | Label of the post. 0 for non-hate speech posts, 1 for hate speech. | ## Usage In order to use MetaHate, you need to agree to our Terms and Conditions. Access to the complete meta-collection (1,226,202 posts) will be granted only upon the submission of all relevant agreements for the derived datasets. Otherwise, we will only provide access to the publicly available datasets (1,101,165 instances). To access the full data, we require agreement to the original Terms of Use of the following works: - [A Large Labeled Corpus for Online Harassment Research (Golbeck et al. 2017)](https://doi.org/10.1145/3091478.3091509) - [The 'Call me sexist but' Dataset (Samory et al. 2021)](https://search.gesis.org/research_data/SDN-10.7802-2251) - [Are You a Racist or Am I Seeing Things? Annotator Influence on Hate Speech Detection on Twitter (Waseem 2016)](https://doi.org/10.18653/v1/W16-5618) - [Hateful Symbols or Hateful People? Predictive Features for Hate Speech Detection on Twitter (Waseem and Hovy 2016)](https://doi.org/10.18653/v1/N16-2013) - [Aggression-annotated Corpus of Hindi-English Code-mixed Data (Kumar et al. 2018)](https://aclanthology.org/L18-1226) - [#MeTooMA: Multi-Aspect Annotations of Tweets Related to the MeToo Movement (Gautam et al. 2020)](https://doi.org/10.1609/icwsm.v14i1.7292) - [Pinpointing Fine-Grained Relationships between Hateful Tweets and Replies (Albanyan and Blanco 2022)](https://doi.org/10.1609/aaai.v36i10.21284) - [Large-Scale Hate Speech Detection with Cross-Domain Transfer (Toraman, Şahinuç, and Yilmaz 2022)](https://aclanthology.org/2022.lrec-1.238) - [Developing a Multilingual Annotated Corpus of Misogyny and Aggression (Bhattacharya et al. 2020)](https://aclanthology.org/2020.trac-1.25) ## Disclaimer This dataset includes content that may contain hate speech, offensive language, or other forms of inappropriate and objectionable material. The content present in the dataset is not created or endorsed by the authors or contributors of this project. It is collected from various sources and does not necessarily reflect the views or opinions of the project maintainers. This dataset is intended for research, analysis, or educational purposes only. The authors do not endorse or promote any harmful, discriminatory, or offensive behaviour conveyed in the dataset. Users are advised to exercise caution and sensitivity when interacting with or interpreting the dataset. If you choose to use the dataset, it is recommended to handle the content responsibly and in compliance with ethical guidelines and applicable laws. The project maintainers disclaim any responsibility for the content within the dataset and cannot be held liable for how it is used or interpreted by others.
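## Loading Example

A minimal sketch of how the TSV described under Dataset Structure could be read once access has been granted. The file name `metahate.tsv`, the local path, and the use of pandas are illustrative assumptions, not part of the official distribution.

```python
import pandas as pd

# Read the tab-separated file; the "text" and "label" columns follow the
# schema documented above (label: 0 = non-hate speech, 1 = hate speech).
df = pd.read_csv("metahate.tsv", sep="\t")

# Basic sanity checks against the documented schema.
assert {"text", "label"} <= set(df.columns)
print(df["label"].value_counts())          # class distribution
print(df.sample(3)[["text", "label"]])     # a few example posts
```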
## Citation If you use this dataset, please cite the following reference: ```bibtex @misc{piot2024metahate, title={MetaHate: A Dataset for Unifying Efforts on Hate Speech Detection}, author={Paloma Piot and Patricia Martín-Rodilla and Javier Parapar}, year={2024}, eprint={2401.06526}, archivePrefix={arXiv}, primaryClass={cs.CL} } ``` ## Acknowledgements The authors thank the funding from the Horizon Europe research and innovation programme under the Marie Skłodowska-Curie Grant Agreement No. 101073351. The authors also thank the financial support supplied by the Consellería de Cultura, Educación, Formación Profesional e Universidades (accreditation 2019-2022 ED431G/01, ED431B 2022/33) and the European Regional Development Fund, which acknowledges the CITIC Research Center in ICT of the University of A Coruña as a Research Center of the Galician University System and the project PID2022-137061OB-C21 (Ministerio de Ciencia e Innovación, Agencia Estatal de Investigación, Proyectos de Generación de Conocimiento; supported by the European Regional Development Fund). The authors also thank the funding of project PLEC2021-007662 (MCIN/AEI/10.13039/501100011033, Ministerio de Ciencia e Innovación, Agencia Estatal de Investigación, Plan de Recuperación, Transformación y Resiliencia, Unión Europea-Next Generation EU).
irlab-udc/metahate
[ "task_categories:text-classification", "size_categories:1M<n<10M", "language:en", "license:cc-by-nc-sa-4.0", "arxiv:2401.06526", "doi:10.57967/hf/1572", "region:us" ]
2024-01-04T15:18:05+00:00
{"language": ["en"], "license": "cc-by-nc-sa-4.0", "size_categories": ["1M<n<10M"], "task_categories": ["text-classification"], "pretty_name": "MetaHate"}
2024-01-29T08:13:30+00:00
[ "2401.06526" ]
[ "en" ]
TAGS #task_categories-text-classification #size_categories-1M<n<10M #language-English #license-cc-by-nc-sa-4.0 #arxiv-2401.06526 #doi-10.57967/hf/1572 #region-us
MetaHate: A Dataset for Unifying Efforts on Hate Speech Detection ================================================================= This is MetaHate: a meta-collection of 36 hate speech datasets from social media comments. Dataset Structure ----------------- The dataset contains 1,226,202 social media posts in a TSV file. Each element contains the following fields: Usage ----- In order to use MetaHate you need to agree to our Terms and Conditions. Access to the complete meta-collection (1,226,202) will be granted only upon the submission of all relevant agreements for the derived datasets. Otherwise, we will only provide the access to the publicly available datasets (1,101,165 instances). To access the full data, we require the original Terms of Use of the following works: * A Large Labeled Corpus for Online Harassment Research (Golbeck et al. 2017) * The 'Call me sexist but' Dataset (Samory et al. 2021) * Are You a Racist or Am I Seeing Things? Annotator Influence on Hate Speech Detection on Twitter (Waseem 2016) * Hateful Symbols or Hateful People? Predictive Features for Hate Speech Detection on Twitter (Waseem and Hovy 2016) * Aggression-annotated Corpus of Hindi-English Code-mixed Data (Kumar et al. 2018) * #MeTooMA: Multi-Aspect Annotations of Tweets Related to the MeToo Movement (Gautam et al. 2020) * Pinpointing Fine-Grained Relationships between Hateful Tweets and Replies (Albanyan and Blanco 2022) * Large-Scale Hate Speech Detection with Cross-Domain Transfer (Toraman, Şahinuç, and Yilmaz 2022) * Developing a Multilingual Annotated Corpus of Misogyny and Aggression (Bhattacharya et al. 2020) Disclaimer ---------- This dataset includes content that may contain hate speech, offensive language, or other forms of inappropriate and objectionable material. The content present in the dataset is not created or endorsed by the authors or contributors of this project. It is collected from various sources and does not necessarily reflect the views or opinions of the project maintainers. The purpose of using this dataset is for research, analysis, or educational purposes only. The authors do not endorse or promote any harmful, discriminatory, or offensive behaviour conveyed in the dataset. Users are advised to exercise caution and sensitivity when interacting with or interpreting the dataset. If you choose to use the dataset, it is recommended to handle the content responsibly and in compliance with ethical guidelines and applicable laws. The project maintainers disclaim any responsibility for the content within the dataset and cannot be held liable for how it is used or interpreted by others. If you use this dataset, please cite the following reference: Acknowledgements ---------------- The authors thank the funding from the Horizon Europe research and innovation programme under the Marie Skłodowska-Curie Grant Agreement No. 101073351. The authors also thank the financial support supplied by the Consellería de Cultura, Educación, Formación Profesional e Universidades (accreditation 2019-2022 ED431G/01, ED431B 2022/33) and the European Regional Development Fund, which acknowledges the CITIC Research Center in ICT of the University of A Coruña as a Research Center of the Galician University System and the project PID2022-137061OB-C21 (Ministerio de Ciencia e Innovación, Agencia Estatal de Investigación, Proyectos de Generación de Conocimiento; supported by the European Regional Development Fund). 
The authors also thank the funding of project PLEC2021-007662 (MCIN/AEI/10.13039/501100011033, Ministerio de Ciencia e Innovación, Agencia Estatal de Investigación, Plan de Recuperación, Transformación y Resiliencia, Unión Europea-Next Generation EU).
[]
[ "TAGS\n#task_categories-text-classification #size_categories-1M<n<10M #language-English #license-cc-by-nc-sa-4.0 #arxiv-2401.06526 #doi-10.57967/hf/1572 #region-us \n" ]
[ 67 ]
[ "passage: TAGS\n#task_categories-text-classification #size_categories-1M<n<10M #language-English #license-cc-by-nc-sa-4.0 #arxiv-2401.06526 #doi-10.57967/hf/1572 #region-us \n" ]
77c462f17ba89c598852fc8a2ad7f47e0f96f1b8
# Dataset Card for Evaluation run of NeuralNovel/Panda-7B-v0.1 <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [NeuralNovel/Panda-7B-v0.1](https://huggingface.co/NeuralNovel/Panda-7B-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_NeuralNovel__Panda-7B-v0.1", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T15:18:35.035620](https://huggingface.co/datasets/open-llm-leaderboard/details_NeuralNovel__Panda-7B-v0.1/blob/main/results_2024-01-04T15-18-35.035620.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.6067411577411931, "acc_stderr": 0.03324319692041124, "acc_norm": 0.6115988704639006, "acc_norm_stderr": 0.03391766146815033, "mc1": 0.5214198286413708, "mc1_stderr": 0.01748743214471164, "mc2": 0.6697345091207095, "mc2_stderr": 0.01518186947277888 }, "harness|arc:challenge|25": { "acc": 0.5930034129692833, "acc_stderr": 0.01435639941800912, "acc_norm": 0.6296928327645052, "acc_norm_stderr": 0.01411129875167495 }, "harness|hellaswag|10": { "acc": 0.6520613423620792, "acc_stderr": 0.004753429806645438, "acc_norm": 0.8375821549492133, "acc_norm_stderr": 0.003680798950531901 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.35, "acc_stderr": 0.04793724854411021, "acc_norm": 0.35, "acc_norm_stderr": 0.04793724854411021 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.5777777777777777, "acc_stderr": 0.04266763404099582, "acc_norm": 0.5777777777777777, "acc_norm_stderr": 0.04266763404099582 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.5986842105263158, "acc_stderr": 0.039889037033362836, "acc_norm": 0.5986842105263158, "acc_norm_stderr": 0.039889037033362836 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.58, "acc_stderr": 0.049604496374885836, "acc_norm": 0.58, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.6716981132075471, "acc_stderr": 0.028901593612411784, "acc_norm": 0.6716981132075471, "acc_norm_stderr": 0.028901593612411784 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.6736111111111112, "acc_stderr": 0.03921067198982266, "acc_norm": 0.6736111111111112, "acc_norm_stderr": 0.03921067198982266 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.42, "acc_stderr": 0.049604496374885836, "acc_norm": 0.42, "acc_norm_stderr": 0.049604496374885836 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.5, "acc_stderr": 0.050251890762960605, "acc_norm": 0.5, "acc_norm_stderr": 0.050251890762960605 }, 
"harness|hendrycksTest-college_mathematics|5": { "acc": 0.37, "acc_stderr": 0.048523658709391, "acc_norm": 0.37, "acc_norm_stderr": 0.048523658709391 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.5895953757225434, "acc_stderr": 0.03750757044895537, "acc_norm": 0.5895953757225434, "acc_norm_stderr": 0.03750757044895537 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.4117647058823529, "acc_stderr": 0.048971049527263666, "acc_norm": 0.4117647058823529, "acc_norm_stderr": 0.048971049527263666 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.67, "acc_stderr": 0.04725815626252609, "acc_norm": 0.67, "acc_norm_stderr": 0.04725815626252609 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.5106382978723404, "acc_stderr": 0.03267862331014063, "acc_norm": 0.5106382978723404, "acc_norm_stderr": 0.03267862331014063 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.42105263157894735, "acc_stderr": 0.046446020912223177, "acc_norm": 0.42105263157894735, "acc_norm_stderr": 0.046446020912223177 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.6275862068965518, "acc_stderr": 0.04028731532947558, "acc_norm": 0.6275862068965518, "acc_norm_stderr": 0.04028731532947558 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.3835978835978836, "acc_stderr": 0.025043757318520193, "acc_norm": 0.3835978835978836, "acc_norm_stderr": 0.025043757318520193 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.4126984126984127, "acc_stderr": 0.04403438954768176, "acc_norm": 0.4126984126984127, "acc_norm_stderr": 0.04403438954768176 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.36, "acc_stderr": 0.04824181513244218, "acc_norm": 0.36, "acc_norm_stderr": 0.04824181513244218 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.6806451612903226, "acc_stderr": 0.026522709674667765, "acc_norm": 0.6806451612903226, "acc_norm_stderr": 0.026522709674667765 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.5123152709359606, "acc_stderr": 0.035169204442208966, "acc_norm": 0.5123152709359606, "acc_norm_stderr": 0.035169204442208966 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.66, "acc_stderr": 0.04760952285695237, "acc_norm": 0.66, "acc_norm_stderr": 0.04760952285695237 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.7333333333333333, "acc_stderr": 0.03453131801885417, "acc_norm": 0.7333333333333333, "acc_norm_stderr": 0.03453131801885417 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.7575757575757576, "acc_stderr": 0.030532892233932022, "acc_norm": 0.7575757575757576, "acc_norm_stderr": 0.030532892233932022 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.844559585492228, "acc_stderr": 0.026148483469153317, "acc_norm": 0.844559585492228, "acc_norm_stderr": 0.026148483469153317 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.5717948717948718, "acc_stderr": 0.02508830145469483, "acc_norm": 0.5717948717948718, "acc_norm_stderr": 0.02508830145469483 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.32222222222222224, "acc_stderr": 0.028493465091028593, "acc_norm": 0.32222222222222224, "acc_norm_stderr": 0.028493465091028593 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.634453781512605, "acc_stderr": 0.031282177063684614, "acc_norm": 0.634453781512605, "acc_norm_stderr": 0.031282177063684614 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 0.37748344370860926, "acc_stderr": 
0.03958027231121569, "acc_norm": 0.37748344370860926, "acc_norm_stderr": 0.03958027231121569 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.7908256880733945, "acc_stderr": 0.017437937173343233, "acc_norm": 0.7908256880733945, "acc_norm_stderr": 0.017437937173343233 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.4861111111111111, "acc_stderr": 0.03408655867977748, "acc_norm": 0.4861111111111111, "acc_norm_stderr": 0.03408655867977748 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.7647058823529411, "acc_stderr": 0.02977177522814563, "acc_norm": 0.7647058823529411, "acc_norm_stderr": 0.02977177522814563 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.7552742616033755, "acc_stderr": 0.02798569938703643, "acc_norm": 0.7552742616033755, "acc_norm_stderr": 0.02798569938703643 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.6143497757847534, "acc_stderr": 0.03266842214289201, "acc_norm": 0.6143497757847534, "acc_norm_stderr": 0.03266842214289201 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.7022900763358778, "acc_stderr": 0.040103589424622034, "acc_norm": 0.7022900763358778, "acc_norm_stderr": 0.040103589424622034 }, "harness|hendrycksTest-international_law|5": { "acc": 0.8181818181818182, "acc_stderr": 0.03520893951097653, "acc_norm": 0.8181818181818182, "acc_norm_stderr": 0.03520893951097653 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.6944444444444444, "acc_stderr": 0.044531975073749834, "acc_norm": 0.6944444444444444, "acc_norm_stderr": 0.044531975073749834 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.7177914110429447, "acc_stderr": 0.03536117886664743, "acc_norm": 0.7177914110429447, "acc_norm_stderr": 0.03536117886664743 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.4107142857142857, "acc_stderr": 0.04669510663875191, "acc_norm": 0.4107142857142857, "acc_norm_stderr": 0.04669510663875191 }, "harness|hendrycksTest-management|5": { "acc": 0.7281553398058253, "acc_stderr": 0.044052680241409216, "acc_norm": 0.7281553398058253, "acc_norm_stderr": 0.044052680241409216 }, "harness|hendrycksTest-marketing|5": { "acc": 0.8547008547008547, "acc_stderr": 0.02308663508684141, "acc_norm": 0.8547008547008547, "acc_norm_stderr": 0.02308663508684141 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.65, "acc_stderr": 0.0479372485441102, "acc_norm": 0.65, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.7739463601532567, "acc_stderr": 0.014957458504335835, "acc_norm": 0.7739463601532567, "acc_norm_stderr": 0.014957458504335835 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.6878612716763006, "acc_stderr": 0.024946792225272314, "acc_norm": 0.6878612716763006, "acc_norm_stderr": 0.024946792225272314 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.38100558659217876, "acc_stderr": 0.016242028834053616, "acc_norm": 0.38100558659217876, "acc_norm_stderr": 0.016242028834053616 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.6862745098039216, "acc_stderr": 0.02656892101545715, "acc_norm": 0.6862745098039216, "acc_norm_stderr": 0.02656892101545715 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.6816720257234726, "acc_stderr": 0.026457225067811025, "acc_norm": 0.6816720257234726, "acc_norm_stderr": 0.026457225067811025 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.7037037037037037, "acc_stderr": 0.025407197798890162, "acc_norm": 0.7037037037037037, "acc_norm_stderr": 0.025407197798890162 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.44680851063829785, "acc_stderr": 0.029658235097666907, "acc_norm": 0.44680851063829785, "acc_norm_stderr": 0.029658235097666907 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.4256844850065189, "acc_stderr": 0.012628393551811943, "acc_norm": 0.4256844850065189, "acc_norm_stderr": 0.012628393551811943 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.625, "acc_stderr": 0.029408372932278746, "acc_norm": 0.625, "acc_norm_stderr": 0.029408372932278746 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.6209150326797386, "acc_stderr": 0.019627444748412236, "acc_norm": 0.6209150326797386, "acc_norm_stderr": 0.019627444748412236 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.7272727272727273, "acc_stderr": 0.04265792110940589, "acc_norm": 0.7272727272727273, "acc_norm_stderr": 0.04265792110940589 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.710204081632653, "acc_stderr": 0.029043088683304324, "acc_norm": 0.710204081632653, "acc_norm_stderr": 0.029043088683304324 }, "harness|hendrycksTest-sociology|5": { "acc": 0.7960199004975125, "acc_stderr": 0.02849317624532607, "acc_norm": 0.7960199004975125, "acc_norm_stderr": 0.02849317624532607 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.81, "acc_stderr": 0.03942772444036625, "acc_norm": 0.81, "acc_norm_stderr": 0.03942772444036625 }, "harness|hendrycksTest-virology|5": { "acc": 0.4879518072289157, "acc_stderr": 0.03891364495835821, "acc_norm": 0.4879518072289157, "acc_norm_stderr": 0.03891364495835821 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.8421052631578947, "acc_stderr": 0.02796678585916089, "acc_norm": 0.8421052631578947, "acc_norm_stderr": 0.02796678585916089 }, "harness|truthfulqa:mc|0": { "mc1": 0.5214198286413708, "mc1_stderr": 0.01748743214471164, "mc2": 0.6697345091207095, "mc2_stderr": 0.01518186947277888 }, "harness|winogrande|5": { "acc": 0.7624309392265194, "acc_stderr": 0.011961298905803152 }, "harness|gsm8k|5": { "acc": 0.3866565579984837, "acc_stderr": 0.013413955095965302 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
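As a complement to the loading example above, the aggregated "results" configuration can be inspected in the same way. This is only a sketch: it assumes the same split layout as the other evaluation-run datasets in this collection (each configuration also exposes a "latest" split) and does not assume any particular column names in the results parquet.

```python
from datasets import load_dataset

# The "results" configuration stores the aggregated metrics of the run;
# the "latest" split always points to the most recent evaluation.
results = load_dataset(
    "open-llm-leaderboard/details_NeuralNovel__Panda-7B-v0.1",
    "results",
    split="latest",
)

print(results.column_names)  # inspect which fields are available
print(results[0])            # the aggregated record for this run
```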
open-llm-leaderboard/details_NeuralNovel__Panda-7B-v0.1
[ "region:us" ]
2024-01-04T15:20:53+00:00
{"pretty_name": "Evaluation run of NeuralNovel/Panda-7B-v0.1", "dataset_summary": "Dataset automatically created during the evaluation run of model [NeuralNovel/Panda-7B-v0.1](https://huggingface.co/NeuralNovel/Panda-7B-v0.1) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_NeuralNovel__Panda-7B-v0.1\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T15:18:35.035620](https://huggingface.co/datasets/open-llm-leaderboard/details_NeuralNovel__Panda-7B-v0.1/blob/main/results_2024-01-04T15-18-35.035620.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.6067411577411931,\n \"acc_stderr\": 0.03324319692041124,\n \"acc_norm\": 0.6115988704639006,\n \"acc_norm_stderr\": 0.03391766146815033,\n \"mc1\": 0.5214198286413708,\n \"mc1_stderr\": 0.01748743214471164,\n \"mc2\": 0.6697345091207095,\n \"mc2_stderr\": 0.01518186947277888\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.5930034129692833,\n \"acc_stderr\": 0.01435639941800912,\n \"acc_norm\": 0.6296928327645052,\n \"acc_norm_stderr\": 0.01411129875167495\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.6520613423620792,\n \"acc_stderr\": 0.004753429806645438,\n \"acc_norm\": 0.8375821549492133,\n \"acc_norm_stderr\": 0.003680798950531901\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.04793724854411021,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.04793724854411021\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.5777777777777777,\n \"acc_stderr\": 0.04266763404099582,\n \"acc_norm\": 0.5777777777777777,\n \"acc_norm_stderr\": 0.04266763404099582\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.5986842105263158,\n \"acc_stderr\": 0.039889037033362836,\n \"acc_norm\": 0.5986842105263158,\n \"acc_norm_stderr\": 0.039889037033362836\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.58,\n \"acc_stderr\": 0.049604496374885836,\n \"acc_norm\": 0.58,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.6716981132075471,\n \"acc_stderr\": 0.028901593612411784,\n \"acc_norm\": 0.6716981132075471,\n \"acc_norm_stderr\": 0.028901593612411784\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.6736111111111112,\n \"acc_stderr\": 0.03921067198982266,\n \"acc_norm\": 0.6736111111111112,\n \"acc_norm_stderr\": 0.03921067198982266\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.42,\n \"acc_stderr\": 
0.049604496374885836,\n \"acc_norm\": 0.42,\n \"acc_norm_stderr\": 0.049604496374885836\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.050251890762960605,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.050251890762960605\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.048523658709391,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.048523658709391\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.5895953757225434,\n \"acc_stderr\": 0.03750757044895537,\n \"acc_norm\": 0.5895953757225434,\n \"acc_norm_stderr\": 0.03750757044895537\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.4117647058823529,\n \"acc_stderr\": 0.048971049527263666,\n \"acc_norm\": 0.4117647058823529,\n \"acc_norm_stderr\": 0.048971049527263666\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.67,\n \"acc_stderr\": 0.04725815626252609,\n \"acc_norm\": 0.67,\n \"acc_norm_stderr\": 0.04725815626252609\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.5106382978723404,\n \"acc_stderr\": 0.03267862331014063,\n \"acc_norm\": 0.5106382978723404,\n \"acc_norm_stderr\": 0.03267862331014063\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.42105263157894735,\n \"acc_stderr\": 0.046446020912223177,\n \"acc_norm\": 0.42105263157894735,\n \"acc_norm_stderr\": 0.046446020912223177\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.6275862068965518,\n \"acc_stderr\": 0.04028731532947558,\n \"acc_norm\": 0.6275862068965518,\n \"acc_norm_stderr\": 0.04028731532947558\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.3835978835978836,\n \"acc_stderr\": 0.025043757318520193,\n \"acc_norm\": 0.3835978835978836,\n \"acc_norm_stderr\": 0.025043757318520193\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.4126984126984127,\n \"acc_stderr\": 0.04403438954768176,\n \"acc_norm\": 0.4126984126984127,\n \"acc_norm_stderr\": 0.04403438954768176\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.36,\n \"acc_stderr\": 0.04824181513244218,\n \"acc_norm\": 0.36,\n \"acc_norm_stderr\": 0.04824181513244218\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.6806451612903226,\n \"acc_stderr\": 0.026522709674667765,\n \"acc_norm\": 0.6806451612903226,\n \"acc_norm_stderr\": 0.026522709674667765\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.5123152709359606,\n \"acc_stderr\": 0.035169204442208966,\n \"acc_norm\": 0.5123152709359606,\n \"acc_norm_stderr\": 0.035169204442208966\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.66,\n \"acc_stderr\": 0.04760952285695237,\n \"acc_norm\": 0.66,\n \"acc_norm_stderr\": 0.04760952285695237\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.7333333333333333,\n \"acc_stderr\": 0.03453131801885417,\n \"acc_norm\": 0.7333333333333333,\n \"acc_norm_stderr\": 0.03453131801885417\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.7575757575757576,\n \"acc_stderr\": 0.030532892233932022,\n \"acc_norm\": 0.7575757575757576,\n \"acc_norm_stderr\": 0.030532892233932022\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.844559585492228,\n \"acc_stderr\": 0.026148483469153317,\n \"acc_norm\": 0.844559585492228,\n \"acc_norm_stderr\": 0.026148483469153317\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.5717948717948718,\n \"acc_stderr\": 0.02508830145469483,\n \"acc_norm\": 0.5717948717948718,\n \"acc_norm_stderr\": 0.02508830145469483\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.32222222222222224,\n \"acc_stderr\": 0.028493465091028593,\n \"acc_norm\": 0.32222222222222224,\n \"acc_norm_stderr\": 0.028493465091028593\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.634453781512605,\n \"acc_stderr\": 0.031282177063684614,\n \"acc_norm\": 0.634453781512605,\n \"acc_norm_stderr\": 0.031282177063684614\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.37748344370860926,\n \"acc_stderr\": 0.03958027231121569,\n \"acc_norm\": 0.37748344370860926,\n \"acc_norm_stderr\": 0.03958027231121569\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.7908256880733945,\n \"acc_stderr\": 0.017437937173343233,\n \"acc_norm\": 0.7908256880733945,\n \"acc_norm_stderr\": 0.017437937173343233\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.4861111111111111,\n \"acc_stderr\": 0.03408655867977748,\n \"acc_norm\": 0.4861111111111111,\n \"acc_norm_stderr\": 0.03408655867977748\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.7647058823529411,\n \"acc_stderr\": 0.02977177522814563,\n \"acc_norm\": 0.7647058823529411,\n \"acc_norm_stderr\": 0.02977177522814563\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.7552742616033755,\n \"acc_stderr\": 0.02798569938703643,\n \"acc_norm\": 0.7552742616033755,\n \"acc_norm_stderr\": 0.02798569938703643\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.6143497757847534,\n \"acc_stderr\": 0.03266842214289201,\n \"acc_norm\": 0.6143497757847534,\n \"acc_norm_stderr\": 0.03266842214289201\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.7022900763358778,\n \"acc_stderr\": 0.040103589424622034,\n \"acc_norm\": 0.7022900763358778,\n \"acc_norm_stderr\": 0.040103589424622034\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.8181818181818182,\n \"acc_stderr\": 0.03520893951097653,\n \"acc_norm\": 0.8181818181818182,\n \"acc_norm_stderr\": 0.03520893951097653\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.6944444444444444,\n \"acc_stderr\": 0.044531975073749834,\n \"acc_norm\": 0.6944444444444444,\n \"acc_norm_stderr\": 0.044531975073749834\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.7177914110429447,\n \"acc_stderr\": 0.03536117886664743,\n \"acc_norm\": 0.7177914110429447,\n \"acc_norm_stderr\": 0.03536117886664743\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.4107142857142857,\n \"acc_stderr\": 0.04669510663875191,\n \"acc_norm\": 0.4107142857142857,\n \"acc_norm_stderr\": 0.04669510663875191\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.7281553398058253,\n \"acc_stderr\": 0.044052680241409216,\n \"acc_norm\": 0.7281553398058253,\n \"acc_norm_stderr\": 0.044052680241409216\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.8547008547008547,\n \"acc_stderr\": 0.02308663508684141,\n \"acc_norm\": 0.8547008547008547,\n \"acc_norm_stderr\": 0.02308663508684141\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.65,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.65,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.7739463601532567,\n \"acc_stderr\": 0.014957458504335835,\n \"acc_norm\": 0.7739463601532567,\n \"acc_norm_stderr\": 0.014957458504335835\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.6878612716763006,\n \"acc_stderr\": 0.024946792225272314,\n \"acc_norm\": 0.6878612716763006,\n \"acc_norm_stderr\": 0.024946792225272314\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.38100558659217876,\n \"acc_stderr\": 0.016242028834053616,\n \"acc_norm\": 0.38100558659217876,\n \"acc_norm_stderr\": 0.016242028834053616\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.6862745098039216,\n \"acc_stderr\": 0.02656892101545715,\n \"acc_norm\": 0.6862745098039216,\n \"acc_norm_stderr\": 0.02656892101545715\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.6816720257234726,\n \"acc_stderr\": 0.026457225067811025,\n \"acc_norm\": 0.6816720257234726,\n \"acc_norm_stderr\": 0.026457225067811025\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.7037037037037037,\n \"acc_stderr\": 0.025407197798890162,\n \"acc_norm\": 0.7037037037037037,\n \"acc_norm_stderr\": 0.025407197798890162\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.44680851063829785,\n \"acc_stderr\": 0.029658235097666907,\n \"acc_norm\": 0.44680851063829785,\n \"acc_norm_stderr\": 0.029658235097666907\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.4256844850065189,\n \"acc_stderr\": 0.012628393551811943,\n \"acc_norm\": 0.4256844850065189,\n \"acc_norm_stderr\": 0.012628393551811943\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.625,\n \"acc_stderr\": 0.029408372932278746,\n \"acc_norm\": 0.625,\n \"acc_norm_stderr\": 0.029408372932278746\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.6209150326797386,\n \"acc_stderr\": 0.019627444748412236,\n \"acc_norm\": 0.6209150326797386,\n \"acc_norm_stderr\": 0.019627444748412236\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.7272727272727273,\n \"acc_stderr\": 0.04265792110940589,\n \"acc_norm\": 0.7272727272727273,\n \"acc_norm_stderr\": 0.04265792110940589\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.710204081632653,\n \"acc_stderr\": 0.029043088683304324,\n \"acc_norm\": 0.710204081632653,\n \"acc_norm_stderr\": 0.029043088683304324\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.7960199004975125,\n \"acc_stderr\": 0.02849317624532607,\n \"acc_norm\": 0.7960199004975125,\n \"acc_norm_stderr\": 0.02849317624532607\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.81,\n \"acc_stderr\": 0.03942772444036625,\n \"acc_norm\": 0.81,\n \"acc_norm_stderr\": 0.03942772444036625\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.4879518072289157,\n \"acc_stderr\": 0.03891364495835821,\n \"acc_norm\": 0.4879518072289157,\n \"acc_norm_stderr\": 0.03891364495835821\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.8421052631578947,\n \"acc_stderr\": 0.02796678585916089,\n \"acc_norm\": 0.8421052631578947,\n \"acc_norm_stderr\": 0.02796678585916089\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.5214198286413708,\n \"mc1_stderr\": 0.01748743214471164,\n \"mc2\": 0.6697345091207095,\n \"mc2_stderr\": 0.01518186947277888\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.7624309392265194,\n \"acc_stderr\": 0.011961298905803152\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.3866565579984837,\n \"acc_stderr\": 0.013413955095965302\n }\n}\n```", 
"repo_url": "https://huggingface.co/NeuralNovel/Panda-7B-v0.1", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|arc:challenge|25_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|gsm8k|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hellaswag|10_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T15-18-35.035620.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T15-18-35.035620.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T15-18-35.035620.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T15-18-35.035620.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T15-18-35.035620.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T15_18_35.035620", "path": ["**/details_harness|winogrande|5_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T15-18-35.035620.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T15_18_35.035620", "path": ["results_2024-01-04T15-18-35.035620.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T15-18-35.035620.parquet"]}]}]}
2024-01-04T15:21:14+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of NeuralNovel/Panda-7B-v0.1 Dataset automatically created during the evaluation run of model NeuralNovel/Panda-7B-v0.1 on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split is always pointing to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T15:18:35.035620 (note that there might be results for other tasks in the repos if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
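For this dataset, the loading call looks like the following (the `harness_winogrande_5` config is one example; any of the 63 config names listed in the metadata above can be used instead):

```python
from datasets import load_dataset

# Per-sample details for one evaluation task of this run; swap the config
# name for any other config listed in the metadata above, e.g.
# "harness_arc_challenge_25" or "harness_gsm8k_5".
data = load_dataset(
    "open-llm-leaderboard/details_NeuralNovel__Panda-7B-v0.1",
    "harness_winogrande_5",
    split="train",
)
```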
[ "# Dataset Card for Evaluation run of NeuralNovel/Panda-7B-v0.1\n\n\n\nDataset automatically created during the evaluation run of model NeuralNovel/Panda-7B-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T15:18:35.035620(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of NeuralNovel/Panda-7B-v0.1\n\n\n\nDataset automatically created during the evaluation run of model NeuralNovel/Panda-7B-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T15:18:35.035620(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 185, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of NeuralNovel/Panda-7B-v0.1\n\n\n\nDataset automatically created during the evaluation run of model NeuralNovel/Panda-7B-v0.1 on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T15:18:35.035620(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
1ee5844b2e2da6d0671480219001cb3d73b97022
Here are the ***behavior datasets*** used for supervised fine-tuning (SFT). They can also be used for direct preference optimization (DPO). An exact copy can also be found on [Github](https://github.com/PKU-YuanGroup/Machine-Mindset/edit/main/datasets/behaviour). Prefix ***'en'*** denotes the datasets of the English version. Prefix ***'zh'*** denotes the datasets of the Chinese version. ## Dataset introduction There are four dimensions in MBTI, and each dimension has two opposite attributes. To be specific: + Energy: Extraversion (E) - Introversion (I) + Information: Sensing (S) - Intuition (N) + Decision: Thinking (T) - Feeling (F) + Execution: Judging (J) - Perceiving (P) Based on the above, you can infer the content of each JSON file from its name. The datasets follow the Alpaca format, consisting of instruction, input and output. ## How to use these datasets for behavior supervised fine-tuning (SFT) For example, if you want to make an LLM behave like an ***ISFJ***, you need to select ***the four corresponding files*** (en_energe_introversion.json, en_information_sensing.json, en_decision_feeling.json, en_execution_judging.json) and use those four for SFT. ## How to use these datasets for direct preference optimization (DPO) For example, if you want to make an LLM lean ***more toward feeling (F) than thinking (T)*** via DPO, you need to select ***the two corresponding files*** (en_decision_feeling.json, en_decision_thinking.json) and then compile the two into the correct format for DPO (see the sketch below). For the correct format, please refer to [this](https://github.com/PKU-YuanGroup/Machine-Mindset/blob/main/datasets/dpo/README.md).
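The selection-and-merge steps described above are simple enough to sketch. The following is a minimal illustration, not the project's own tooling: it assumes the four Alpaca-format files sit in the current directory under the names given above, that the two opposite files of a dimension are aligned by index, and that the `prompt`/`chosen`/`rejected` layout shown for DPO is only a common convention; the format the project actually expects is defined in the DPO README linked above.

```python
import json

# --- SFT: merge the four files that define an ISFJ persona ---
# File names are taken from the card above; adjust paths as needed.
sft_files = [
    "en_energe_introversion.json",   # I (Energy dimension)
    "en_information_sensing.json",   # S
    "en_decision_feeling.json",      # F
    "en_execution_judging.json",     # J
]
sft_data = []
for path in sft_files:
    with open(path, encoding="utf-8") as f:
        sft_data.extend(json.load(f))  # each file is a list of Alpaca records

with open("isfj_sft.json", "w", encoding="utf-8") as f:
    json.dump(sft_data, f, ensure_ascii=False, indent=2)

# --- DPO: pair the two opposite files of one dimension (F preferred over T) ---
# Assumes the two files are aligned record-by-record.
with open("en_decision_feeling.json", encoding="utf-8") as f:
    feeling = json.load(f)
with open("en_decision_thinking.json", encoding="utf-8") as f:
    thinking = json.load(f)

dpo_pairs = [
    {
        "prompt": fe["instruction"] + ("\n" + fe["input"] if fe.get("input") else ""),
        "chosen": fe["output"],    # feeling-style answer is preferred
        "rejected": th["output"],  # thinking-style answer is rejected
    }
    for fe, th in zip(feeling, thinking)
]

with open("decision_feeling_dpo.json", "w", encoding="utf-8") as f:
    json.dump(dpo_pairs, f, ensure_ascii=False, indent=2)
```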
FarReelAILab/Machine_Mindset_MBTI_dataset
[ "license:apache-2.0", "region:us" ]
2024-01-04T15:21:41+00:00
{"license": "apache-2.0"}
2024-01-06T04:34:23+00:00
[]
[]
TAGS #license-apache-2.0 #region-us
Here are the *behavior datasets* used for supervised fine-tuning (SFT). They can also be used for direct preference optimization (DPO). An exact copy can also be found on Github. Prefix *'en'* denotes the datasets of the English version. Prefix *'zh'* denotes the datasets of the Chinese version. ## Dataset introduction There are four dimensions in MBTI, and each dimension has two opposite attributes. To be specific: + Energy: Extraversion (E) - Introversion (I) + Information: Sensing (S) - Intuition (N) + Decision: Thinking (T) - Feeling (F) + Execution: Judging (J) - Perceiving (P) Based on the above, you can infer the content of each JSON file from its name. The datasets follow the Alpaca format, consisting of instruction, input and output. ## How to use these datasets for behavior supervised fine-tuning (SFT) For example, if you want to make an LLM behave like an *ISFJ*, you need to select *the four corresponding files* (en_energe_introversion.json, en_information_sensing.json, en_decision_feeling.json, en_execution_judging.json) and use those four for SFT. ## How to use these datasets for direct preference optimization (DPO) For example, if you want to make an LLM lean *more toward feeling (F) than thinking (T)* via DPO, you need to select *the two corresponding files* (en_decision_feeling.json, en_decision_thinking.json) and then compile the two into the correct format for DPO. For the correct format, please refer to this.
[ "## Dataset introduction\n\nThere are four dimension in MBTI. And there are two opposite attributes within each dimension.\n\nTo be specific:\n\n+ Energe: Extraversion (E) - Introversion (I)\n\n+ Information: Sensing (S) - Intuition (N)\n\n+ Decision: Thinking (T) - Feeling (F)\n\n+ Execution: Judging (J) - Perceiving (P)\n\nBased on the above, you can infer the content of the json file from its name.\n\nThe datasets follow the Alpaca format, consisting of instruction, input and output.", "## How to use these datasets for behavior supervised fine-tuning (SFT)\n\nFor example, if you want to make an LLM behave like an *ISFJ*, you need to select *the four corresponding files* (en_energe_introversion.json, en_information_sensing.json, en_decision_feeling.json, en_execution_judging.json). \n\nAnd use the four for SFT.", "## How to use these datasets for direct preference optimization (DPO)\n\nFor example, if you want to make an LLM be *more feeling (F) than thinking (T)* by DPO, you need to select *the two corresponding files* (en_decision_feeling.json, en_decision_thinking.json). \n\nAnd then compile the two into the correct format for DPO. For the correct format, please refer to this." ]
[ "TAGS\n#license-apache-2.0 #region-us \n", "## Dataset introduction\n\nThere are four dimension in MBTI. And there are two opposite attributes within each dimension.\n\nTo be specific:\n\n+ Energe: Extraversion (E) - Introversion (I)\n\n+ Information: Sensing (S) - Intuition (N)\n\n+ Decision: Thinking (T) - Feeling (F)\n\n+ Execution: Judging (J) - Perceiving (P)\n\nBased on the above, you can infer the content of the json file from its name.\n\nThe datasets follow the Alpaca format, consisting of instruction, input and output.", "## How to use these datasets for behavior supervised fine-tuning (SFT)\n\nFor example, if you want to make an LLM behave like an *ISFJ*, you need to select *the four corresponding files* (en_energe_introversion.json, en_information_sensing.json, en_decision_feeling.json, en_execution_judging.json). \n\nAnd use the four for SFT.", "## How to use these datasets for direct preference optimization (DPO)\n\nFor example, if you want to make an LLM be *more feeling (F) than thinking (T)* by DPO, you need to select *the two corresponding files* (en_decision_feeling.json, en_decision_thinking.json). \n\nAnd then compile the two into the correct format for DPO. For the correct format, please refer to this." ]
[ 14, 133, 108, 105 ]
[ "passage: TAGS\n#license-apache-2.0 #region-us \n## Dataset introduction\n\nThere are four dimension in MBTI. And there are two opposite attributes within each dimension.\n\nTo be specific:\n\n+ Energe: Extraversion (E) - Introversion (I)\n\n+ Information: Sensing (S) - Intuition (N)\n\n+ Decision: Thinking (T) - Feeling (F)\n\n+ Execution: Judging (J) - Perceiving (P)\n\nBased on the above, you can infer the content of the json file from its name.\n\nThe datasets follow the Alpaca format, consisting of instruction, input and output.## How to use these datasets for behavior supervised fine-tuning (SFT)\n\nFor example, if you want to make an LLM behave like an *ISFJ*, you need to select *the four corresponding files* (en_energe_introversion.json, en_information_sensing.json, en_decision_feeling.json, en_execution_judging.json). \n\nAnd use the four for SFT.## How to use these datasets for direct preference optimization (DPO)\n\nFor example, if you want to make an LLM be *more feeling (F) than thinking (T)* by DPO, you need to select *the two corresponding files* (en_decision_feeling.json, en_decision_thinking.json). \n\nAnd then compile the two into the correct format for DPO. For the correct format, please refer to this." ]
ddffebd61a5c6f5cf4ca96399bdcad1b280c3697
# Dataset Card for Evaluation run of KnutJaegersberg/Deacon-1_8b <!-- Provide a quick summary of the dataset. --> Dataset automatically created during the evaluation run of model [KnutJaegersberg/Deacon-1_8b](https://huggingface.co/KnutJaegersberg/Deacon-1_8b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard). The dataset is composed of 63 configuration, each one coresponding to one of the evaluated task. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The "train" split is always pointing to the latest results. An additional configuration "results" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)). To load the details from a run, you can for instance do the following: ```python from datasets import load_dataset data = load_dataset("open-llm-leaderboard/details_KnutJaegersberg__Deacon-1_8b", "harness_winogrande_5", split="train") ``` ## Latest results These are the [latest results from run 2024-01-04T15:20:39.040351](https://huggingface.co/datasets/open-llm-leaderboard/details_KnutJaegersberg__Deacon-1_8b/blob/main/results_2024-01-04T15-20-39.040351.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the "latest" split for each eval): ```python { "all": { "acc": 0.3383700092600465, "acc_stderr": 0.03332769015870953, "acc_norm": 0.34275566483863507, "acc_norm_stderr": 0.034230027342292337, "mc1": 0.2423500611995104, "mc1_stderr": 0.015000674373570347, "mc2": 0.39049431751977004, "mc2_stderr": 0.01530713715267398 }, "harness|arc:challenge|25": { "acc": 0.30119453924914674, "acc_stderr": 0.013406741767847626, "acc_norm": 0.3370307167235495, "acc_norm_stderr": 0.013813476652902276 }, "harness|hellaswag|10": { "acc": 0.4056960764787891, "acc_stderr": 0.004900227226433379, "acc_norm": 0.5233021310495917, "acc_norm_stderr": 0.0049843596699519245 }, "harness|hendrycksTest-abstract_algebra|5": { "acc": 0.27, "acc_stderr": 0.044619604333847415, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847415 }, "harness|hendrycksTest-anatomy|5": { "acc": 0.34074074074074073, "acc_stderr": 0.04094376269996794, "acc_norm": 0.34074074074074073, "acc_norm_stderr": 0.04094376269996794 }, "harness|hendrycksTest-astronomy|5": { "acc": 0.34868421052631576, "acc_stderr": 0.03878139888797609, "acc_norm": 0.34868421052631576, "acc_norm_stderr": 0.03878139888797609 }, "harness|hendrycksTest-business_ethics|5": { "acc": 0.28, "acc_stderr": 0.045126085985421276, "acc_norm": 0.28, "acc_norm_stderr": 0.045126085985421276 }, "harness|hendrycksTest-clinical_knowledge|5": { "acc": 0.38113207547169814, "acc_stderr": 0.029890609686286648, "acc_norm": 0.38113207547169814, "acc_norm_stderr": 0.029890609686286648 }, "harness|hendrycksTest-college_biology|5": { "acc": 0.2638888888888889, "acc_stderr": 0.03685651095897532, "acc_norm": 0.2638888888888889, "acc_norm_stderr": 0.03685651095897532 }, "harness|hendrycksTest-college_chemistry|5": { "acc": 0.28, "acc_stderr": 0.04512608598542126, "acc_norm": 0.28, "acc_norm_stderr": 0.04512608598542126 }, "harness|hendrycksTest-college_computer_science|5": { "acc": 0.23, "acc_stderr": 0.04229525846816506, "acc_norm": 0.23, "acc_norm_stderr": 
0.04229525846816506 }, "harness|hendrycksTest-college_mathematics|5": { "acc": 0.27, "acc_stderr": 0.044619604333847394, "acc_norm": 0.27, "acc_norm_stderr": 0.044619604333847394 }, "harness|hendrycksTest-college_medicine|5": { "acc": 0.26011560693641617, "acc_stderr": 0.033450369167889904, "acc_norm": 0.26011560693641617, "acc_norm_stderr": 0.033450369167889904 }, "harness|hendrycksTest-college_physics|5": { "acc": 0.24509803921568626, "acc_stderr": 0.042801058373643966, "acc_norm": 0.24509803921568626, "acc_norm_stderr": 0.042801058373643966 }, "harness|hendrycksTest-computer_security|5": { "acc": 0.37, "acc_stderr": 0.04852365870939099, "acc_norm": 0.37, "acc_norm_stderr": 0.04852365870939099 }, "harness|hendrycksTest-conceptual_physics|5": { "acc": 0.37872340425531914, "acc_stderr": 0.03170995606040655, "acc_norm": 0.37872340425531914, "acc_norm_stderr": 0.03170995606040655 }, "harness|hendrycksTest-econometrics|5": { "acc": 0.2719298245614035, "acc_stderr": 0.04185774424022057, "acc_norm": 0.2719298245614035, "acc_norm_stderr": 0.04185774424022057 }, "harness|hendrycksTest-electrical_engineering|5": { "acc": 0.36551724137931035, "acc_stderr": 0.040131241954243856, "acc_norm": 0.36551724137931035, "acc_norm_stderr": 0.040131241954243856 }, "harness|hendrycksTest-elementary_mathematics|5": { "acc": 0.24603174603174602, "acc_stderr": 0.022182037202948365, "acc_norm": 0.24603174603174602, "acc_norm_stderr": 0.022182037202948365 }, "harness|hendrycksTest-formal_logic|5": { "acc": 0.23809523809523808, "acc_stderr": 0.03809523809523811, "acc_norm": 0.23809523809523808, "acc_norm_stderr": 0.03809523809523811 }, "harness|hendrycksTest-global_facts|5": { "acc": 0.33, "acc_stderr": 0.047258156262526045, "acc_norm": 0.33, "acc_norm_stderr": 0.047258156262526045 }, "harness|hendrycksTest-high_school_biology|5": { "acc": 0.38387096774193546, "acc_stderr": 0.027666182075539638, "acc_norm": 0.38387096774193546, "acc_norm_stderr": 0.027666182075539638 }, "harness|hendrycksTest-high_school_chemistry|5": { "acc": 0.33004926108374383, "acc_stderr": 0.03308530426228258, "acc_norm": 0.33004926108374383, "acc_norm_stderr": 0.03308530426228258 }, "harness|hendrycksTest-high_school_computer_science|5": { "acc": 0.35, "acc_stderr": 0.0479372485441102, "acc_norm": 0.35, "acc_norm_stderr": 0.0479372485441102 }, "harness|hendrycksTest-high_school_european_history|5": { "acc": 0.3090909090909091, "acc_stderr": 0.036085410115739666, "acc_norm": 0.3090909090909091, "acc_norm_stderr": 0.036085410115739666 }, "harness|hendrycksTest-high_school_geography|5": { "acc": 0.4292929292929293, "acc_stderr": 0.035265527246011986, "acc_norm": 0.4292929292929293, "acc_norm_stderr": 0.035265527246011986 }, "harness|hendrycksTest-high_school_government_and_politics|5": { "acc": 0.41968911917098445, "acc_stderr": 0.035615873276858834, "acc_norm": 0.41968911917098445, "acc_norm_stderr": 0.035615873276858834 }, "harness|hendrycksTest-high_school_macroeconomics|5": { "acc": 0.3487179487179487, "acc_stderr": 0.02416278028401772, "acc_norm": 0.3487179487179487, "acc_norm_stderr": 0.02416278028401772 }, "harness|hendrycksTest-high_school_mathematics|5": { "acc": 0.27037037037037037, "acc_stderr": 0.027080372815145665, "acc_norm": 0.27037037037037037, "acc_norm_stderr": 0.027080372815145665 }, "harness|hendrycksTest-high_school_microeconomics|5": { "acc": 0.3739495798319328, "acc_stderr": 0.031429466378837076, "acc_norm": 0.3739495798319328, "acc_norm_stderr": 0.031429466378837076 }, "harness|hendrycksTest-high_school_physics|5": { "acc": 
0.2251655629139073, "acc_stderr": 0.03410435282008936, "acc_norm": 0.2251655629139073, "acc_norm_stderr": 0.03410435282008936 }, "harness|hendrycksTest-high_school_psychology|5": { "acc": 0.3596330275229358, "acc_stderr": 0.020575234660123787, "acc_norm": 0.3596330275229358, "acc_norm_stderr": 0.020575234660123787 }, "harness|hendrycksTest-high_school_statistics|5": { "acc": 0.25462962962962965, "acc_stderr": 0.029711275860005344, "acc_norm": 0.25462962962962965, "acc_norm_stderr": 0.029711275860005344 }, "harness|hendrycksTest-high_school_us_history|5": { "acc": 0.35784313725490197, "acc_stderr": 0.03364487286088298, "acc_norm": 0.35784313725490197, "acc_norm_stderr": 0.03364487286088298 }, "harness|hendrycksTest-high_school_world_history|5": { "acc": 0.4219409282700422, "acc_stderr": 0.032148146302403695, "acc_norm": 0.4219409282700422, "acc_norm_stderr": 0.032148146302403695 }, "harness|hendrycksTest-human_aging|5": { "acc": 0.42152466367713004, "acc_stderr": 0.03314190222110658, "acc_norm": 0.42152466367713004, "acc_norm_stderr": 0.03314190222110658 }, "harness|hendrycksTest-human_sexuality|5": { "acc": 0.44274809160305345, "acc_stderr": 0.043564472026650695, "acc_norm": 0.44274809160305345, "acc_norm_stderr": 0.043564472026650695 }, "harness|hendrycksTest-international_law|5": { "acc": 0.45454545454545453, "acc_stderr": 0.045454545454545456, "acc_norm": 0.45454545454545453, "acc_norm_stderr": 0.045454545454545456 }, "harness|hendrycksTest-jurisprudence|5": { "acc": 0.37037037037037035, "acc_stderr": 0.04668408033024932, "acc_norm": 0.37037037037037035, "acc_norm_stderr": 0.04668408033024932 }, "harness|hendrycksTest-logical_fallacies|5": { "acc": 0.3374233128834356, "acc_stderr": 0.03714908409935575, "acc_norm": 0.3374233128834356, "acc_norm_stderr": 0.03714908409935575 }, "harness|hendrycksTest-machine_learning|5": { "acc": 0.29464285714285715, "acc_stderr": 0.0432704093257873, "acc_norm": 0.29464285714285715, "acc_norm_stderr": 0.0432704093257873 }, "harness|hendrycksTest-management|5": { "acc": 0.4077669902912621, "acc_stderr": 0.048657775704107675, "acc_norm": 0.4077669902912621, "acc_norm_stderr": 0.048657775704107675 }, "harness|hendrycksTest-marketing|5": { "acc": 0.5, "acc_stderr": 0.03275608910402091, "acc_norm": 0.5, "acc_norm_stderr": 0.03275608910402091 }, "harness|hendrycksTest-medical_genetics|5": { "acc": 0.34, "acc_stderr": 0.04760952285695235, "acc_norm": 0.34, "acc_norm_stderr": 0.04760952285695235 }, "harness|hendrycksTest-miscellaneous|5": { "acc": 0.38697318007662834, "acc_stderr": 0.017417138059440136, "acc_norm": 0.38697318007662834, "acc_norm_stderr": 0.017417138059440136 }, "harness|hendrycksTest-moral_disputes|5": { "acc": 0.34104046242774566, "acc_stderr": 0.025522474632121612, "acc_norm": 0.34104046242774566, "acc_norm_stderr": 0.025522474632121612 }, "harness|hendrycksTest-moral_scenarios|5": { "acc": 0.2424581005586592, "acc_stderr": 0.014333522059217889, "acc_norm": 0.2424581005586592, "acc_norm_stderr": 0.014333522059217889 }, "harness|hendrycksTest-nutrition|5": { "acc": 0.369281045751634, "acc_stderr": 0.02763417668960266, "acc_norm": 0.369281045751634, "acc_norm_stderr": 0.02763417668960266 }, "harness|hendrycksTest-philosophy|5": { "acc": 0.3633440514469453, "acc_stderr": 0.027316847674192717, "acc_norm": 0.3633440514469453, "acc_norm_stderr": 0.027316847674192717 }, "harness|hendrycksTest-prehistory|5": { "acc": 0.3765432098765432, "acc_stderr": 0.026959344518747787, "acc_norm": 0.3765432098765432, "acc_norm_stderr": 0.026959344518747787 }, 
"harness|hendrycksTest-professional_accounting|5": { "acc": 0.2907801418439716, "acc_stderr": 0.027090664368353178, "acc_norm": 0.2907801418439716, "acc_norm_stderr": 0.027090664368353178 }, "harness|hendrycksTest-professional_law|5": { "acc": 0.2861799217731421, "acc_stderr": 0.011543642878150757, "acc_norm": 0.2861799217731421, "acc_norm_stderr": 0.011543642878150757 }, "harness|hendrycksTest-professional_medicine|5": { "acc": 0.27205882352941174, "acc_stderr": 0.02703304115168146, "acc_norm": 0.27205882352941174, "acc_norm_stderr": 0.02703304115168146 }, "harness|hendrycksTest-professional_psychology|5": { "acc": 0.31209150326797386, "acc_stderr": 0.01874501120127766, "acc_norm": 0.31209150326797386, "acc_norm_stderr": 0.01874501120127766 }, "harness|hendrycksTest-public_relations|5": { "acc": 0.4, "acc_stderr": 0.0469237132203465, "acc_norm": 0.4, "acc_norm_stderr": 0.0469237132203465 }, "harness|hendrycksTest-security_studies|5": { "acc": 0.3469387755102041, "acc_stderr": 0.030472526026726492, "acc_norm": 0.3469387755102041, "acc_norm_stderr": 0.030472526026726492 }, "harness|hendrycksTest-sociology|5": { "acc": 0.4079601990049751, "acc_stderr": 0.034751163651940926, "acc_norm": 0.4079601990049751, "acc_norm_stderr": 0.034751163651940926 }, "harness|hendrycksTest-us_foreign_policy|5": { "acc": 0.48, "acc_stderr": 0.050211673156867795, "acc_norm": 0.48, "acc_norm_stderr": 0.050211673156867795 }, "harness|hendrycksTest-virology|5": { "acc": 0.39759036144578314, "acc_stderr": 0.038099730845402184, "acc_norm": 0.39759036144578314, "acc_norm_stderr": 0.038099730845402184 }, "harness|hendrycksTest-world_religions|5": { "acc": 0.3157894736842105, "acc_stderr": 0.03565079670708311, "acc_norm": 0.3157894736842105, "acc_norm_stderr": 0.03565079670708311 }, "harness|truthfulqa:mc|0": { "mc1": 0.2423500611995104, "mc1_stderr": 0.015000674373570347, "mc2": 0.39049431751977004, "mc2_stderr": 0.01530713715267398 }, "harness|winogrande|5": { "acc": 0.5714285714285714, "acc_stderr": 0.013908353814606679 }, "harness|gsm8k|5": { "acc": 0.0, "acc_stderr": 0.0 } } ``` ## Dataset Details ### Dataset Description <!-- Provide a longer summary of what this dataset is. --> - **Curated by:** [More Information Needed] - **Funded by [optional]:** [More Information Needed] - **Shared by [optional]:** [More Information Needed] - **Language(s) (NLP):** [More Information Needed] - **License:** [More Information Needed] ### Dataset Sources [optional] <!-- Provide the basic links for the dataset. --> - **Repository:** [More Information Needed] - **Paper [optional]:** [More Information Needed] - **Demo [optional]:** [More Information Needed] ## Uses <!-- Address questions around how the dataset is intended to be used. --> ### Direct Use <!-- This section describes suitable use cases for the dataset. --> [More Information Needed] ### Out-of-Scope Use <!-- This section addresses misuse, malicious use, and uses that the dataset will not work well for. --> [More Information Needed] ## Dataset Structure <!-- This section provides a description of the dataset fields, and additional information about the dataset structure such as criteria used to create the splits, relationships between data points, etc. --> [More Information Needed] ## Dataset Creation ### Curation Rationale <!-- Motivation for the creation of this dataset. --> [More Information Needed] ### Source Data <!-- This section describes the source data (e.g. news text and headlines, social media posts, translated sentences, ...). 
--> #### Data Collection and Processing <!-- This section describes the data collection and processing process such as data selection criteria, filtering and normalization methods, tools and libraries used, etc. --> [More Information Needed] #### Who are the source data producers? <!-- This section describes the people or systems who originally created the data. It should also include self-reported demographic or identity information for the source data creators if this information is available. --> [More Information Needed] ### Annotations [optional] <!-- If the dataset contains annotations which are not part of the initial data collection, use this section to describe them. --> #### Annotation process <!-- This section describes the annotation process such as annotation tools used in the process, the amount of data annotated, annotation guidelines provided to the annotators, interannotator statistics, annotation validation, etc. --> [More Information Needed] #### Who are the annotators? <!-- This section describes the people or systems who created the annotations. --> [More Information Needed] #### Personal and Sensitive Information <!-- State whether the dataset contains data that might be considered personal, sensitive, or private (e.g., data that reveals addresses, uniquely identifiable names or aliases, racial or ethnic origins, sexual orientations, religious beliefs, political opinions, financial or health data, etc.). If efforts were made to anonymize the data, describe the anonymization process. --> [More Information Needed] ## Bias, Risks, and Limitations <!-- This section is meant to convey both technical and sociotechnical limitations. --> [More Information Needed] ### Recommendations <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. --> Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. ## Citation [optional] <!-- If there is a paper or blog post introducing the dataset, the APA and Bibtex information for that should go in this section. --> **BibTeX:** [More Information Needed] **APA:** [More Information Needed] ## Glossary [optional] <!-- If relevant, include terms and calculations in this section that can help readers understand the dataset or dataset card. --> [More Information Needed] ## More Information [optional] [More Information Needed] ## Dataset Card Authors [optional] [More Information Needed] ## Dataset Card Contact [More Information Needed]
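For reference, here is a minimal sketch of pulling the aggregated metrics rather than the per-task details; it assumes the `datasets` library is installed and relies on the `results` configuration and `latest` split listed in this dataset's configuration metadata.

```python
from datasets import load_dataset

# Aggregated scores for the most recent evaluation run. The "results" config
# and the "latest" split come from the config list in the dataset metadata.
results = load_dataset(
    "open-llm-leaderboard/details_KnutJaegersberg__Deacon-1_8b",
    "results",
    split="latest",
)

# A single row holds the aggregated metrics of one run as nested columns.
print(results[0])
```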
open-llm-leaderboard/details_KnutJaegersberg__Deacon-1_8b
[ "region:us" ]
2024-01-04T15:22:47+00:00
{"pretty_name": "Evaluation run of KnutJaegersberg/Deacon-1_8b", "dataset_summary": "Dataset automatically created during the evaluation run of model [KnutJaegersberg/Deacon-1_8b](https://huggingface.co/KnutJaegersberg/Deacon-1_8b) on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard).\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the [Open LLM Leaderboard](https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard)).\n\nTo load the details from a run, you can for instance do the following:\n```python\nfrom datasets import load_dataset\ndata = load_dataset(\"open-llm-leaderboard/details_KnutJaegersberg__Deacon-1_8b\",\n\t\"harness_winogrande_5\",\n\tsplit=\"train\")\n```\n\n## Latest results\n\nThese are the [latest results from run 2024-01-04T15:20:39.040351](https://huggingface.co/datasets/open-llm-leaderboard/details_KnutJaegersberg__Deacon-1_8b/blob/main/results_2024-01-04T15-20-39.040351.json)(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):\n\n```python\n{\n \"all\": {\n \"acc\": 0.3383700092600465,\n \"acc_stderr\": 0.03332769015870953,\n \"acc_norm\": 0.34275566483863507,\n \"acc_norm_stderr\": 0.034230027342292337,\n \"mc1\": 0.2423500611995104,\n \"mc1_stderr\": 0.015000674373570347,\n \"mc2\": 0.39049431751977004,\n \"mc2_stderr\": 0.01530713715267398\n },\n \"harness|arc:challenge|25\": {\n \"acc\": 0.30119453924914674,\n \"acc_stderr\": 0.013406741767847626,\n \"acc_norm\": 0.3370307167235495,\n \"acc_norm_stderr\": 0.013813476652902276\n },\n \"harness|hellaswag|10\": {\n \"acc\": 0.4056960764787891,\n \"acc_stderr\": 0.004900227226433379,\n \"acc_norm\": 0.5233021310495917,\n \"acc_norm_stderr\": 0.0049843596699519245\n },\n \"harness|hendrycksTest-abstract_algebra|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847415,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847415\n },\n \"harness|hendrycksTest-anatomy|5\": {\n \"acc\": 0.34074074074074073,\n \"acc_stderr\": 0.04094376269996794,\n \"acc_norm\": 0.34074074074074073,\n \"acc_norm_stderr\": 0.04094376269996794\n },\n \"harness|hendrycksTest-astronomy|5\": {\n \"acc\": 0.34868421052631576,\n \"acc_stderr\": 0.03878139888797609,\n \"acc_norm\": 0.34868421052631576,\n \"acc_norm_stderr\": 0.03878139888797609\n },\n \"harness|hendrycksTest-business_ethics|5\": {\n \"acc\": 0.28,\n \"acc_stderr\": 0.045126085985421276,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.045126085985421276\n },\n \"harness|hendrycksTest-clinical_knowledge|5\": {\n \"acc\": 0.38113207547169814,\n \"acc_stderr\": 0.029890609686286648,\n \"acc_norm\": 0.38113207547169814,\n \"acc_norm_stderr\": 0.029890609686286648\n },\n \"harness|hendrycksTest-college_biology|5\": {\n \"acc\": 0.2638888888888889,\n \"acc_stderr\": 0.03685651095897532,\n \"acc_norm\": 0.2638888888888889,\n \"acc_norm_stderr\": 0.03685651095897532\n },\n \"harness|hendrycksTest-college_chemistry|5\": {\n \"acc\": 0.28,\n 
\"acc_stderr\": 0.04512608598542126,\n \"acc_norm\": 0.28,\n \"acc_norm_stderr\": 0.04512608598542126\n },\n \"harness|hendrycksTest-college_computer_science|5\": {\n \"acc\": 0.23,\n \"acc_stderr\": 0.04229525846816506,\n \"acc_norm\": 0.23,\n \"acc_norm_stderr\": 0.04229525846816506\n },\n \"harness|hendrycksTest-college_mathematics|5\": {\n \"acc\": 0.27,\n \"acc_stderr\": 0.044619604333847394,\n \"acc_norm\": 0.27,\n \"acc_norm_stderr\": 0.044619604333847394\n },\n \"harness|hendrycksTest-college_medicine|5\": {\n \"acc\": 0.26011560693641617,\n \"acc_stderr\": 0.033450369167889904,\n \"acc_norm\": 0.26011560693641617,\n \"acc_norm_stderr\": 0.033450369167889904\n },\n \"harness|hendrycksTest-college_physics|5\": {\n \"acc\": 0.24509803921568626,\n \"acc_stderr\": 0.042801058373643966,\n \"acc_norm\": 0.24509803921568626,\n \"acc_norm_stderr\": 0.042801058373643966\n },\n \"harness|hendrycksTest-computer_security|5\": {\n \"acc\": 0.37,\n \"acc_stderr\": 0.04852365870939099,\n \"acc_norm\": 0.37,\n \"acc_norm_stderr\": 0.04852365870939099\n },\n \"harness|hendrycksTest-conceptual_physics|5\": {\n \"acc\": 0.37872340425531914,\n \"acc_stderr\": 0.03170995606040655,\n \"acc_norm\": 0.37872340425531914,\n \"acc_norm_stderr\": 0.03170995606040655\n },\n \"harness|hendrycksTest-econometrics|5\": {\n \"acc\": 0.2719298245614035,\n \"acc_stderr\": 0.04185774424022057,\n \"acc_norm\": 0.2719298245614035,\n \"acc_norm_stderr\": 0.04185774424022057\n },\n \"harness|hendrycksTest-electrical_engineering|5\": {\n \"acc\": 0.36551724137931035,\n \"acc_stderr\": 0.040131241954243856,\n \"acc_norm\": 0.36551724137931035,\n \"acc_norm_stderr\": 0.040131241954243856\n },\n \"harness|hendrycksTest-elementary_mathematics|5\": {\n \"acc\": 0.24603174603174602,\n \"acc_stderr\": 0.022182037202948365,\n \"acc_norm\": 0.24603174603174602,\n \"acc_norm_stderr\": 0.022182037202948365\n },\n \"harness|hendrycksTest-formal_logic|5\": {\n \"acc\": 0.23809523809523808,\n \"acc_stderr\": 0.03809523809523811,\n \"acc_norm\": 0.23809523809523808,\n \"acc_norm_stderr\": 0.03809523809523811\n },\n \"harness|hendrycksTest-global_facts|5\": {\n \"acc\": 0.33,\n \"acc_stderr\": 0.047258156262526045,\n \"acc_norm\": 0.33,\n \"acc_norm_stderr\": 0.047258156262526045\n },\n \"harness|hendrycksTest-high_school_biology|5\": {\n \"acc\": 0.38387096774193546,\n \"acc_stderr\": 0.027666182075539638,\n \"acc_norm\": 0.38387096774193546,\n \"acc_norm_stderr\": 0.027666182075539638\n },\n \"harness|hendrycksTest-high_school_chemistry|5\": {\n \"acc\": 0.33004926108374383,\n \"acc_stderr\": 0.03308530426228258,\n \"acc_norm\": 0.33004926108374383,\n \"acc_norm_stderr\": 0.03308530426228258\n },\n \"harness|hendrycksTest-high_school_computer_science|5\": {\n \"acc\": 0.35,\n \"acc_stderr\": 0.0479372485441102,\n \"acc_norm\": 0.35,\n \"acc_norm_stderr\": 0.0479372485441102\n },\n \"harness|hendrycksTest-high_school_european_history|5\": {\n \"acc\": 0.3090909090909091,\n \"acc_stderr\": 0.036085410115739666,\n \"acc_norm\": 0.3090909090909091,\n \"acc_norm_stderr\": 0.036085410115739666\n },\n \"harness|hendrycksTest-high_school_geography|5\": {\n \"acc\": 0.4292929292929293,\n \"acc_stderr\": 0.035265527246011986,\n \"acc_norm\": 0.4292929292929293,\n \"acc_norm_stderr\": 0.035265527246011986\n },\n \"harness|hendrycksTest-high_school_government_and_politics|5\": {\n \"acc\": 0.41968911917098445,\n \"acc_stderr\": 0.035615873276858834,\n \"acc_norm\": 0.41968911917098445,\n \"acc_norm_stderr\": 0.035615873276858834\n },\n 
\"harness|hendrycksTest-high_school_macroeconomics|5\": {\n \"acc\": 0.3487179487179487,\n \"acc_stderr\": 0.02416278028401772,\n \"acc_norm\": 0.3487179487179487,\n \"acc_norm_stderr\": 0.02416278028401772\n },\n \"harness|hendrycksTest-high_school_mathematics|5\": {\n \"acc\": 0.27037037037037037,\n \"acc_stderr\": 0.027080372815145665,\n \"acc_norm\": 0.27037037037037037,\n \"acc_norm_stderr\": 0.027080372815145665\n },\n \"harness|hendrycksTest-high_school_microeconomics|5\": {\n \"acc\": 0.3739495798319328,\n \"acc_stderr\": 0.031429466378837076,\n \"acc_norm\": 0.3739495798319328,\n \"acc_norm_stderr\": 0.031429466378837076\n },\n \"harness|hendrycksTest-high_school_physics|5\": {\n \"acc\": 0.2251655629139073,\n \"acc_stderr\": 0.03410435282008936,\n \"acc_norm\": 0.2251655629139073,\n \"acc_norm_stderr\": 0.03410435282008936\n },\n \"harness|hendrycksTest-high_school_psychology|5\": {\n \"acc\": 0.3596330275229358,\n \"acc_stderr\": 0.020575234660123787,\n \"acc_norm\": 0.3596330275229358,\n \"acc_norm_stderr\": 0.020575234660123787\n },\n \"harness|hendrycksTest-high_school_statistics|5\": {\n \"acc\": 0.25462962962962965,\n \"acc_stderr\": 0.029711275860005344,\n \"acc_norm\": 0.25462962962962965,\n \"acc_norm_stderr\": 0.029711275860005344\n },\n \"harness|hendrycksTest-high_school_us_history|5\": {\n \"acc\": 0.35784313725490197,\n \"acc_stderr\": 0.03364487286088298,\n \"acc_norm\": 0.35784313725490197,\n \"acc_norm_stderr\": 0.03364487286088298\n },\n \"harness|hendrycksTest-high_school_world_history|5\": {\n \"acc\": 0.4219409282700422,\n \"acc_stderr\": 0.032148146302403695,\n \"acc_norm\": 0.4219409282700422,\n \"acc_norm_stderr\": 0.032148146302403695\n },\n \"harness|hendrycksTest-human_aging|5\": {\n \"acc\": 0.42152466367713004,\n \"acc_stderr\": 0.03314190222110658,\n \"acc_norm\": 0.42152466367713004,\n \"acc_norm_stderr\": 0.03314190222110658\n },\n \"harness|hendrycksTest-human_sexuality|5\": {\n \"acc\": 0.44274809160305345,\n \"acc_stderr\": 0.043564472026650695,\n \"acc_norm\": 0.44274809160305345,\n \"acc_norm_stderr\": 0.043564472026650695\n },\n \"harness|hendrycksTest-international_law|5\": {\n \"acc\": 0.45454545454545453,\n \"acc_stderr\": 0.045454545454545456,\n \"acc_norm\": 0.45454545454545453,\n \"acc_norm_stderr\": 0.045454545454545456\n },\n \"harness|hendrycksTest-jurisprudence|5\": {\n \"acc\": 0.37037037037037035,\n \"acc_stderr\": 0.04668408033024932,\n \"acc_norm\": 0.37037037037037035,\n \"acc_norm_stderr\": 0.04668408033024932\n },\n \"harness|hendrycksTest-logical_fallacies|5\": {\n \"acc\": 0.3374233128834356,\n \"acc_stderr\": 0.03714908409935575,\n \"acc_norm\": 0.3374233128834356,\n \"acc_norm_stderr\": 0.03714908409935575\n },\n \"harness|hendrycksTest-machine_learning|5\": {\n \"acc\": 0.29464285714285715,\n \"acc_stderr\": 0.0432704093257873,\n \"acc_norm\": 0.29464285714285715,\n \"acc_norm_stderr\": 0.0432704093257873\n },\n \"harness|hendrycksTest-management|5\": {\n \"acc\": 0.4077669902912621,\n \"acc_stderr\": 0.048657775704107675,\n \"acc_norm\": 0.4077669902912621,\n \"acc_norm_stderr\": 0.048657775704107675\n },\n \"harness|hendrycksTest-marketing|5\": {\n \"acc\": 0.5,\n \"acc_stderr\": 0.03275608910402091,\n \"acc_norm\": 0.5,\n \"acc_norm_stderr\": 0.03275608910402091\n },\n \"harness|hendrycksTest-medical_genetics|5\": {\n \"acc\": 0.34,\n \"acc_stderr\": 0.04760952285695235,\n \"acc_norm\": 0.34,\n \"acc_norm_stderr\": 0.04760952285695235\n },\n \"harness|hendrycksTest-miscellaneous|5\": {\n \"acc\": 
0.38697318007662834,\n \"acc_stderr\": 0.017417138059440136,\n \"acc_norm\": 0.38697318007662834,\n \"acc_norm_stderr\": 0.017417138059440136\n },\n \"harness|hendrycksTest-moral_disputes|5\": {\n \"acc\": 0.34104046242774566,\n \"acc_stderr\": 0.025522474632121612,\n \"acc_norm\": 0.34104046242774566,\n \"acc_norm_stderr\": 0.025522474632121612\n },\n \"harness|hendrycksTest-moral_scenarios|5\": {\n \"acc\": 0.2424581005586592,\n \"acc_stderr\": 0.014333522059217889,\n \"acc_norm\": 0.2424581005586592,\n \"acc_norm_stderr\": 0.014333522059217889\n },\n \"harness|hendrycksTest-nutrition|5\": {\n \"acc\": 0.369281045751634,\n \"acc_stderr\": 0.02763417668960266,\n \"acc_norm\": 0.369281045751634,\n \"acc_norm_stderr\": 0.02763417668960266\n },\n \"harness|hendrycksTest-philosophy|5\": {\n \"acc\": 0.3633440514469453,\n \"acc_stderr\": 0.027316847674192717,\n \"acc_norm\": 0.3633440514469453,\n \"acc_norm_stderr\": 0.027316847674192717\n },\n \"harness|hendrycksTest-prehistory|5\": {\n \"acc\": 0.3765432098765432,\n \"acc_stderr\": 0.026959344518747787,\n \"acc_norm\": 0.3765432098765432,\n \"acc_norm_stderr\": 0.026959344518747787\n },\n \"harness|hendrycksTest-professional_accounting|5\": {\n \"acc\": 0.2907801418439716,\n \"acc_stderr\": 0.027090664368353178,\n \"acc_norm\": 0.2907801418439716,\n \"acc_norm_stderr\": 0.027090664368353178\n },\n \"harness|hendrycksTest-professional_law|5\": {\n \"acc\": 0.2861799217731421,\n \"acc_stderr\": 0.011543642878150757,\n \"acc_norm\": 0.2861799217731421,\n \"acc_norm_stderr\": 0.011543642878150757\n },\n \"harness|hendrycksTest-professional_medicine|5\": {\n \"acc\": 0.27205882352941174,\n \"acc_stderr\": 0.02703304115168146,\n \"acc_norm\": 0.27205882352941174,\n \"acc_norm_stderr\": 0.02703304115168146\n },\n \"harness|hendrycksTest-professional_psychology|5\": {\n \"acc\": 0.31209150326797386,\n \"acc_stderr\": 0.01874501120127766,\n \"acc_norm\": 0.31209150326797386,\n \"acc_norm_stderr\": 0.01874501120127766\n },\n \"harness|hendrycksTest-public_relations|5\": {\n \"acc\": 0.4,\n \"acc_stderr\": 0.0469237132203465,\n \"acc_norm\": 0.4,\n \"acc_norm_stderr\": 0.0469237132203465\n },\n \"harness|hendrycksTest-security_studies|5\": {\n \"acc\": 0.3469387755102041,\n \"acc_stderr\": 0.030472526026726492,\n \"acc_norm\": 0.3469387755102041,\n \"acc_norm_stderr\": 0.030472526026726492\n },\n \"harness|hendrycksTest-sociology|5\": {\n \"acc\": 0.4079601990049751,\n \"acc_stderr\": 0.034751163651940926,\n \"acc_norm\": 0.4079601990049751,\n \"acc_norm_stderr\": 0.034751163651940926\n },\n \"harness|hendrycksTest-us_foreign_policy|5\": {\n \"acc\": 0.48,\n \"acc_stderr\": 0.050211673156867795,\n \"acc_norm\": 0.48,\n \"acc_norm_stderr\": 0.050211673156867795\n },\n \"harness|hendrycksTest-virology|5\": {\n \"acc\": 0.39759036144578314,\n \"acc_stderr\": 0.038099730845402184,\n \"acc_norm\": 0.39759036144578314,\n \"acc_norm_stderr\": 0.038099730845402184\n },\n \"harness|hendrycksTest-world_religions|5\": {\n \"acc\": 0.3157894736842105,\n \"acc_stderr\": 0.03565079670708311,\n \"acc_norm\": 0.3157894736842105,\n \"acc_norm_stderr\": 0.03565079670708311\n },\n \"harness|truthfulqa:mc|0\": {\n \"mc1\": 0.2423500611995104,\n \"mc1_stderr\": 0.015000674373570347,\n \"mc2\": 0.39049431751977004,\n \"mc2_stderr\": 0.01530713715267398\n },\n \"harness|winogrande|5\": {\n \"acc\": 0.5714285714285714,\n \"acc_stderr\": 0.013908353814606679\n },\n \"harness|gsm8k|5\": {\n \"acc\": 0.0,\n \"acc_stderr\": 0.0\n }\n}\n```", "repo_url": 
"https://huggingface.co/KnutJaegersberg/Deacon-1_8b", "leaderboard_url": "https://huggingface.co/spaces/HuggingFaceH4/open_llm_leaderboard", "point_of_contact": "[email protected]", "configs": [{"config_name": "harness_arc_challenge_25", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|arc:challenge|25_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|arc:challenge|25_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_gsm8k_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|gsm8k|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|gsm8k|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hellaswag_10", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hellaswag|10_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hellaswag|10_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T15-20-39.040351.parquet", 
"**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-philosophy|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-anatomy|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-astronomy|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-college_biology|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T15-20-39.040351.parquet", 
"**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-college_physics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-computer_security|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-econometrics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-global_facts|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-human_aging|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-international_law|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-management|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-marketing|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-nutrition|5_2024-01-04T15-20-39.040351.parquet", 
"**/details_harness|hendrycksTest-philosophy|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-prehistory|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-professional_law|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-public_relations|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-security_studies|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-sociology|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-virology|5_2024-01-04T15-20-39.040351.parquet", "**/details_harness|hendrycksTest-world_religions|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_abstract_algebra_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-abstract_algebra|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_anatomy_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-anatomy|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_astronomy_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-astronomy|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_business_ethics_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-business_ethics|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_clinical_knowledge_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-clinical_knowledge|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_biology_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_biology|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_chemistry_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_chemistry|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_computer_science_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": 
["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_computer_science|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_mathematics_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_mathematics|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_medicine_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_medicine|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_college_physics_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-college_physics|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_computer_security_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-computer_security|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_conceptual_physics_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-conceptual_physics|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_econometrics_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-econometrics|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_electrical_engineering_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-electrical_engineering|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_elementary_mathematics_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-elementary_mathematics|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_formal_logic_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-formal_logic|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_global_facts_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": 
["**/details_harness|hendrycksTest-global_facts|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_biology_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_biology|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_chemistry_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_chemistry|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_computer_science_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_computer_science|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_european_history_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_european_history|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_geography_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_geography|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_government_and_politics_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_government_and_politics|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_macroeconomics_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_macroeconomics|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_mathematics_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_mathematics|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_microeconomics_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_microeconomics|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_physics_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": 
["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_physics|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_psychology_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_psychology|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_statistics_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_statistics|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_us_history_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_us_history|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_high_school_world_history_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-high_school_world_history|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_aging_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_aging|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_human_sexuality_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-human_sexuality|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_international_law_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-international_law|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_jurisprudence_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-jurisprudence|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_logical_fallacies_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-logical_fallacies|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_machine_learning_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T15-20-39.040351.parquet"]}, 
{"split": "latest", "path": ["**/details_harness|hendrycksTest-machine_learning|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_management_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-management|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_marketing_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-marketing|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_medical_genetics_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-medical_genetics|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_miscellaneous_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-miscellaneous|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_disputes_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_disputes|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_moral_scenarios_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-moral_scenarios|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_nutrition_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-nutrition|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_philosophy_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-philosophy|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_prehistory_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-prehistory|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_accounting_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_accounting|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_law_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": 
["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_law|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_medicine_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_medicine|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_professional_psychology_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-professional_psychology|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_public_relations_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-public_relations|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_security_studies_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-security_studies|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_sociology_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-sociology|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_us_foreign_policy_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-us_foreign_policy|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_virology_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-virology|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_hendrycksTest_world_religions_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|hendrycksTest-world_religions|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_truthfulqa_mc_0", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|truthfulqa:mc|0_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "harness_winogrande_5", "data_files": [{"split": "2024_01_04T15_20_39.040351", "path": ["**/details_harness|winogrande|5_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["**/details_harness|winogrande|5_2024-01-04T15-20-39.040351.parquet"]}]}, {"config_name": "results", "data_files": [{"split": 
"2024_01_04T15_20_39.040351", "path": ["results_2024-01-04T15-20-39.040351.parquet"]}, {"split": "latest", "path": ["results_2024-01-04T15-20-39.040351.parquet"]}]}]}
2024-01-04T15:23:10+00:00
[]
[]
TAGS #region-us
# Dataset Card for Evaluation run of KnutJaegersberg/Deacon-1_8b Dataset automatically created during the evaluation run of model KnutJaegersberg/Deacon-1_8b on the Open LLM Leaderboard. The dataset is composed of 63 configurations, each one corresponding to one of the evaluated tasks. The dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run. The "train" split always points to the latest results. An additional configuration "results" stores all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard). To load the details from a run, you can for instance do the following: ## Latest results These are the latest results from run 2024-01-04T15:20:39.040351 (note that there might be results for other tasks in the repo if successive evals didn't cover the same tasks. You can find each in the results and the "latest" split for each eval): ## Dataset Details ### Dataset Description - Curated by: - Funded by [optional]: - Shared by [optional]: - Language(s) (NLP): - License: ### Dataset Sources [optional] - Repository: - Paper [optional]: - Demo [optional]: ## Uses ### Direct Use ### Out-of-Scope Use ## Dataset Structure ## Dataset Creation ### Curation Rationale ### Source Data #### Data Collection and Processing #### Who are the source data producers? ### Annotations [optional] #### Annotation process #### Who are the annotators? #### Personal and Sensitive Information ## Bias, Risks, and Limitations ### Recommendations Users should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations. [optional] BibTeX: APA: ## Glossary [optional] ## More Information [optional] ## Dataset Card Authors [optional] ## Dataset Card Contact
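The card text above ends its loading instructions at "you can for instance do the following:" without carrying over the snippet itself; a minimal sketch, assuming the Open LLM Leaderboard's usual details-repo naming (`open-llm-leaderboard/details_KnutJaegersberg__Deacon-1_8b`) and one of the configs listed in this record's metadata — both names are assumptions, not taken verbatim from the card:

```python
from datasets import load_dataset

# Assumed repo id (details_<org>__<model>) and config name; swap in any of the
# harness_* configs from the metadata above to inspect a different task.
details = load_dataset(
    "open-llm-leaderboard/details_KnutJaegersberg__Deacon-1_8b",
    "harness_winogrande_5",
    split="latest",
)
print(details)
```

Each config in the metadata maps to a specific parquet file, so the same config names can be used to pull per-example details for a single task.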
[ "# Dataset Card for Evaluation run of KnutJaegersberg/Deacon-1_8b\n\n\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/Deacon-1_8b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T15:20:39.040351(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ "TAGS\n#region-us \n", "# Dataset Card for Evaluation run of KnutJaegersberg/Deacon-1_8b\n\n\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/Deacon-1_8b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:", "## Latest results\n\nThese are the latest results from run 2024-01-04T15:20:39.040351(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):", "## Dataset Details", "### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:", "### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:", "## Uses", "### Direct Use", "### Out-of-Scope Use", "## Dataset Structure", "## Dataset Creation", "### Curation Rationale", "### Source Data", "#### Data Collection and Processing", "#### Who are the source data producers?", "### Annotations [optional]", "#### Annotation process", "#### Who are the annotators?", "#### Personal and Sensitive Information", "## Bias, Risks, and Limitations", "### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:", "## Glossary [optional]", "## More Information [optional]", "## Dataset Card Authors [optional]", "## Dataset Card Contact" ]
[ 6, 187, 68, 4, 40, 29, 3, 4, 9, 6, 5, 7, 4, 7, 10, 9, 5, 9, 8, 10, 46, 8, 7, 10, 5 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for Evaluation run of KnutJaegersberg/Deacon-1_8b\n\n\n\nDataset automatically created during the evaluation run of model KnutJaegersberg/Deacon-1_8b on the Open LLM Leaderboard.\n\nThe dataset is composed of 63 configuration, each one coresponding to one of the evaluated task.\n\nThe dataset has been created from 1 run(s). Each run can be found as a specific split in each configuration, the split being named using the timestamp of the run.The \"train\" split is always pointing to the latest results.\n\nAn additional configuration \"results\" store all the aggregated results of the run (and is used to compute and display the aggregated metrics on the Open LLM Leaderboard).\n\nTo load the details from a run, you can for instance do the following:## Latest results\n\nThese are the latest results from run 2024-01-04T15:20:39.040351(note that their might be results for other tasks in the repos if successive evals didn't cover the same tasks. You find each in the results and the \"latest\" split for each eval):## Dataset Details### Dataset Description\n\n\n\n\n\n- Curated by: \n- Funded by [optional]: \n- Shared by [optional]: \n- Language(s) (NLP): \n- License:### Dataset Sources [optional]\n\n\n\n- Repository: \n- Paper [optional]: \n- Demo [optional]:## Uses### Direct Use### Out-of-Scope Use## Dataset Structure## Dataset Creation### Curation Rationale### Source Data#### Data Collection and Processing#### Who are the source data producers?### Annotations [optional]#### Annotation process#### Who are the annotators?#### Personal and Sensitive Information## Bias, Risks, and Limitations### Recommendations\n\n\n\nUsers should be made aware of the risks, biases and limitations of the dataset. More information needed for further recommendations.\n\n[optional]\n\n\n\nBibTeX:\n\n\n\nAPA:## Glossary [optional]## More Information [optional]## Dataset Card Authors [optional]## Dataset Card Contact" ]
0d45faec86cf31ac7e1bd9485fa3cab0b8f36322
# Dataset Card for "pairwise_classification_synthetic_gpt4" [More Information needed](https://github.com/huggingface/datasets/blob/main/CONTRIBUTING.md#how-to-contribute-to-the-dataset-cards)
Xapien/pairwise_classification_synthetic_gpt4
[ "region:us" ]
2024-01-04T16:26:18+00:00
{"dataset_info": {"features": [{"name": "name", "dtype": "string"}, {"name": "summary_a", "dtype": "string"}, {"name": "same_entity_summary", "dtype": "string"}, {"name": "different_entity_summary", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 3650249, "num_examples": 9755}], "download_size": 0, "dataset_size": 3650249}, "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}]}
2024-01-05T15:04:37+00:00
[]
[]
TAGS #region-us
# Dataset Card for "pairwise_classification_synthetic_gpt4" More Information needed
[ "# Dataset Card for \"pairwise_classification_synthetic_gpt4\"\n\nMore Information needed" ]
[ "TAGS\n#region-us \n", "# Dataset Card for \"pairwise_classification_synthetic_gpt4\"\n\nMore Information needed" ]
[ 6, 23 ]
[ "passage: TAGS\n#region-us \n# Dataset Card for \"pairwise_classification_synthetic_gpt4\"\n\nMore Information needed" ]
4716d5ff000f85a0b2772be5771530035f5ca059
# Dataset Card for "databricks-dolly-15k-hi" This dataset was created by splitting data in [dolly-15k](https://huggingface.co/datasets/databricks/databricks-dolly-15k) dataset into sentences and then translating them using [NLLB-200-3.3B](https://huggingface.co/facebook/nllb-200-3.3B) model.
kpriyanshu256/databricks-dolly-15k-hi
[ "size_categories:10K<n<100K", "language:hi", "region:us" ]
2024-01-04T16:49:25+00:00
{"language": ["hi"], "size_categories": ["10K<n<100K"], "configs": [{"config_name": "default", "data_files": [{"split": "train", "path": "data/train-*"}]}], "dataset_info": {"features": [{"name": "instruction", "dtype": "string"}, {"name": "context", "dtype": "string"}, {"name": "response", "dtype": "string"}, {"name": "category", "dtype": "string"}], "splits": [{"name": "train", "num_bytes": 30106504, "num_examples": 15011}], "download_size": 11723675, "dataset_size": 30106504}}
2024-01-05T05:58:52+00:00
[]
[ "hi" ]
TAGS #size_categories-10K<n<100K #language-Hindi #region-us
# Dataset Card for "databricks-dolly-15k-hi" This dataset was created by splitting data in dolly-15k dataset into sentences and then translating them using NLLB-200-3.3B model.
[ "# Dataset Card for \"databricks-dolly-15k-hi\"\n\nThis dataset was created by splitting data in dolly-15k dataset into sentences and then translating them using NLLB-200-3.3B model." ]
[ "TAGS\n#size_categories-10K<n<100K #language-Hindi #region-us \n", "# Dataset Card for \"databricks-dolly-15k-hi\"\n\nThis dataset was created by splitting data in dolly-15k dataset into sentences and then translating them using NLLB-200-3.3B model." ]
[ 22, 52 ]
[ "passage: TAGS\n#size_categories-10K<n<100K #language-Hindi #region-us \n# Dataset Card for \"databricks-dolly-15k-hi\"\n\nThis dataset was created by splitting data in dolly-15k dataset into sentences and then translating them using NLLB-200-3.3B model." ]